Dec 05 12:09:13 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 12:09:13 crc restorecon[4694]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:09:13 crc restorecon[4694]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc 
restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:09:13 crc 
restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 
12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:09:13 crc 
restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:13 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:09:14 crc restorecon[4694]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc 
restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:09:14 crc restorecon[4694]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 05 12:09:14 crc kubenswrapper[4711]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 12:09:14 crc kubenswrapper[4711]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 05 12:09:14 crc kubenswrapper[4711]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 12:09:14 crc kubenswrapper[4711]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 05 12:09:14 crc kubenswrapper[4711]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 05 12:09:14 crc kubenswrapper[4711]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.844403 4711 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849650 4711 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849677 4711 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849683 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849690 4711 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849697 4711 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849704 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849711 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849725 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849735 4711 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849650 4711 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849677 4711 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849683 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849690 4711 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849697 4711 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849704 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849711 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849725 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849735 4711 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849742 4711 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849749 4711 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849755 4711 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849760 4711 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849766 4711 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849772 4711 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849778 4711 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849784 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849789 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849795 4711 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849800 4711 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849807 4711 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849815 4711 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849821 4711 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849827 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849833 4711 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849838 4711 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849844 4711 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849850 4711 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849856 4711 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849861 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849867 4711 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849872 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849878 4711 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849883 4711 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849889 4711 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849894 4711 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849900 4711 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849907 4711 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849912 4711 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849918 4711 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849924 4711 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849929 4711 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849935 4711 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849940 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849945 4711 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849951 4711 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849956 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849962 4711 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849969 4711 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849976 4711 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849983 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849990 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.849996 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850004 4711 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850010 4711 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850016 4711 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850022 4711 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850030 4711 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850036 4711 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850042 4711 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850047 4711 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850053 4711 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850059 4711 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850065 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850072 4711 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850078 4711 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850083 4711 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850089 4711 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850095 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850102 4711 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.850108 4711 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850465 4711 flags.go:64] FLAG: --address="0.0.0.0"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850484 4711 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850498 4711 flags.go:64] FLAG: --anonymous-auth="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850507 4711 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850516 4711 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850523 4711 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850532 4711 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850541 4711 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850547 4711 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850553 4711 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850560 4711 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850567 4711 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850573 4711 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850579 4711 flags.go:64] FLAG: --cgroup-root=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850585 4711 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850591 4711 flags.go:64] FLAG: --client-ca-file=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850598 4711 flags.go:64] FLAG: --cloud-config=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850603 4711 flags.go:64] FLAG: --cloud-provider=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850611 4711 flags.go:64] FLAG: --cluster-dns="[]"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850619 4711 flags.go:64] FLAG: --cluster-domain=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850626 4711 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850632 4711 flags.go:64] FLAG: --config-dir=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850638 4711 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850647 4711 flags.go:64] FLAG: --container-log-max-files="5"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850655 4711 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850665 4711 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850672 4711 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850678 4711 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850687 4711 flags.go:64] FLAG: --contention-profiling="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850693 4711 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850699 4711 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850706 4711 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850711 4711 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850722 4711 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850728 4711 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850734 4711 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850741 4711 flags.go:64] FLAG: --enable-load-reader="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850747 4711 flags.go:64] FLAG: --enable-server="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850753 4711 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850762 4711 flags.go:64] FLAG: --event-burst="100"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850768 4711 flags.go:64] FLAG: --event-qps="50"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850774 4711 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850780 4711 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850786 4711 flags.go:64] FLAG: --eviction-hard=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850794 4711 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850800 4711 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850806 4711 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850812 4711 flags.go:64] FLAG: --eviction-soft=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850819 4711 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850825 4711 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850831 4711 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850837 4711 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850843 4711 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850849 4711 flags.go:64] FLAG: --fail-swap-on="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850855 4711 flags.go:64] FLAG: --feature-gates=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850863 4711 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850869 4711 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850875 4711 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850882 4711 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850889 4711 flags.go:64] FLAG: --healthz-port="10248"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850896 4711 flags.go:64] FLAG: --help="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850902 4711 flags.go:64] FLAG: --hostname-override=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850908 4711 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850914 4711 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850920 4711 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850927 4711 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850934 4711 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850941 4711 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850947 4711 flags.go:64] FLAG: --image-service-endpoint=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850953 4711 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850960 4711 flags.go:64] FLAG: --kube-api-burst="100"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850966 4711 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850972 4711 flags.go:64] FLAG: --kube-api-qps="50"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850978 4711 flags.go:64] FLAG: --kube-reserved=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850984 4711 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850991 4711 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.850998 4711 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851004 4711 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851011 4711 flags.go:64] FLAG: --lock-file=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851018 4711 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851026 4711 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851034 4711 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851046 4711 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851054 4711 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851062 4711 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851069 4711 flags.go:64] FLAG: --logging-format="text"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851077 4711 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851086 4711 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851093 4711 flags.go:64] FLAG: --manifest-url=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851100 4711 flags.go:64] FLAG: --manifest-url-header=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851109 4711 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851116 4711 flags.go:64] FLAG: --max-open-files="1000000"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851124 4711 flags.go:64] FLAG: --max-pods="110"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851130 4711 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851137 4711 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851144 4711 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851153 4711 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851163 4711 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851172 4711 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851181 4711 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851203 4711 flags.go:64] FLAG: --node-status-max-images="50"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851211 4711 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851217 4711 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851224 4711 flags.go:64] FLAG: --pod-cidr=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851230 4711 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851241 4711 flags.go:64] FLAG: --pod-manifest-path=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851248 4711 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851255 4711 flags.go:64] FLAG: --pods-per-core="0"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851263 4711 flags.go:64] FLAG: --port="10250"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851271 4711 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851279 4711 flags.go:64] FLAG: --provider-id=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851287 4711 flags.go:64] FLAG: --qos-reserved=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851295 4711 flags.go:64] FLAG: --read-only-port="10255"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851303 4711 flags.go:64] FLAG: --register-node="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851311 4711 flags.go:64] FLAG: --register-schedulable="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851318 4711 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851331 4711 flags.go:64] FLAG: --registry-burst="10"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851337 4711 flags.go:64] FLAG: --registry-qps="5"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851346 4711 flags.go:64] FLAG: --reserved-cpus=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851360 4711 flags.go:64] FLAG: --reserved-memory=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851375 4711 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851409 4711 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851416 4711 flags.go:64] FLAG: --rotate-certificates="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851423 4711 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851429 4711 flags.go:64] FLAG: --runonce="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851435 4711 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851441 4711 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851448 4711 flags.go:64] FLAG: --seccomp-default="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851454 4711 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851461 4711 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851468 4711 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851475 4711 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851481 4711 flags.go:64] FLAG: --storage-driver-password="root"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851487 4711 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851494 4711 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851500 4711 flags.go:64] FLAG: --storage-driver-user="root"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851506 4711 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851512 4711 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851519 4711 flags.go:64] FLAG: --system-cgroups=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851525 4711 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851536 4711 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851542 4711 flags.go:64] FLAG: --tls-cert-file=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851550 4711 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851558 4711 flags.go:64] FLAG: --tls-min-version=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851564 4711 flags.go:64] FLAG: --tls-private-key-file=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851570 4711 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851578 4711 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851584 4711 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851591 4711 flags.go:64] FLAG: --v="2"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851600 4711 flags.go:64] FLAG: --version="false"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851609 4711 flags.go:64] FLAG: --vmodule=""
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851623 4711 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.851630 4711 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.852243 4711 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.860728 4711 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.860778 4711 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
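Most of the names in the "unrecognized feature gate" warnings above appear to be OpenShift cluster-level FeatureGate names (GatewayAPI, NewOLM, PinnedImages, and so on) handed down to the kubelet, which only knows the upstream Kubernetes gates: it warns on each unknown name, applies the ones it recognizes, and logs the survivors in the feature_gate.go:386 map. Written as a config-file stanza instead of a command-line list, the recognized gates from that map would look like the following sketch (v1beta1 featureGates field, values copied from the parsed map above):

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    featureGates:
      # GA gates, explicitly pinned; the log warns they will be removed
      CloudDualStackNodeIPs: true
      DisableKubeletCloudCredentialProviders: true
      ValidatingAdmissionPolicy: true
      # deprecated gate, still force-enabled here
      KMSv1: true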
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.862008 4711 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.865166 4711 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.865254 4711 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
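The "Client rotation is on" line means client certificate rotation was enabled through the kubelet configuration: the flag dump above still shows --rotate-certificates="false" (its default), so the effective setting evidently comes from /etc/kubernetes/kubelet.conf. A sketch of the stanza that would produce this behavior, assuming the v1beta1 field name:

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    # bootstrap against --bootstrap-kubeconfig, then keep rotating the client
    # cert stored at /var/lib/kubelet/pki/kubelet-client-current.pem
    rotateCertificates: true

The rotation attempt that follows fails with "connection refused", presumably because the API server at api-int.crc.testing:6443 is not yet reachable this early in boot; the certificate manager keeps retrying in the background.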
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.866300 4711 server.go:997] "Starting client certificate rotation"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.866328 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.866698 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-28 14:20:41.894348422 +0000 UTC
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.866835 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.872855 4711 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.892224 4711 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 12:09:14 crc kubenswrapper[4711]: E1205 12:09:14.893238 4711 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.927687 4711 log.go:25] "Validated CRI v1 runtime API"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.942524 4711 log.go:25] "Validated CRI v1 image API"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.945158 4711 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.948326 4711 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-12-03-06-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.948364 4711 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:45 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}]
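
Note the certificate_manager.go lines above: the rotation deadline (2025-11-28) is already in the past at boot (2025-12-05), so the manager immediately logs "Rotating certificates"; the CSR POST then fails because the API server at api-int.crc.testing:6443 is not up yet. client-go picks that deadline at a jittered point roughly 70-90% into the certificate's validity window; a self-contained sketch of the idea (the exact fraction is an implementation detail, and the one-year NotBefore below is an assumption for illustration):

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // rotationDeadline picks a random point 70-90% through the
    // certificate's validity window (sketch, not client-go's code).
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
    	total := notAfter.Sub(notBefore)
    	frac := 0.7 + 0.2*rand.Float64()
    	return notBefore.Add(time.Duration(float64(total) * frac))
    }

    func main() {
    	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:52:08Z") // from the log
    	notBefore := notAfter.AddDate(-1, 0, 0)                         // assumed one-year cert
    	d := rotationDeadline(notBefore, notAfter)
    	fmt.Println("rotation deadline:", d)
    	fmt.Println("rotate immediately?", time.Now().After(d))
    }
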
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.982315 4711 manager.go:217] Machine: {Timestamp:2025-12-05 12:09:14.960139452 +0000 UTC m=+0.544461812 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654132736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:5acb70a7-4bdd-4a98-adeb-a7abc79182a9 BootID:728a4e24-f371-4cb9-97e1-b9890e024e7d Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:45 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:60:fa:3d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:60:fa:3d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:c6:3c:51 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:db:5a:bf Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:a5:51:e1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:ab:69:92 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:e6:4f:9e:11:df:e0 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:9a:5b:28:ed:a2:ca Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654132736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.983147 4711 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.983531 4711 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.987175 4711 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.987714 4711 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.987799 4711 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.988530 4711 topology_manager.go:138] "Creating topology manager with none policy"
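
The nodeConfig entry pins SystemReserved at cpu=200m, memory=350Mi, ephemeral-storage=350Mi, with KubeReserved null and a hard eviction threshold of 100Mi for memory.available. With the standard kubelet formula (allocatable = capacity - kube-reserved - system-reserved - hard eviction), the machine's 33654132736 bytes of memory work out as below (a worked check, not kubelet code):

    package main

    import "fmt"

    const (
    	Mi = int64(1) << 20
    	// Values read from the Machine and nodeConfig entries above.
    	capacity       = int64(33654132736)
    	systemReserved = 350 * Mi // SystemReserved memory
    	evictionHard   = 100 * Mi // memory.available threshold
    )

    func main() {
    	// KubeReserved is null in this config, so it contributes 0.
    	alloc := capacity - systemReserved - evictionHard
    	fmt.Printf("memory allocatable: %d bytes (~%.2f GiB)\n",
    		alloc, float64(alloc)/float64(int64(1)<<30))
    }
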
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.988568 4711 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.988690 4711 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.988734 4711 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.989092 4711 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.989754 4711 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.990934 4711 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.990975 4711 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.991005 4711 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.991030 4711 kubelet.go:324] "Adding apiserver pod source"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.991051 4711 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.993555 4711 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.994331 4711 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995114 4711 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995839 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995864 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995872 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995880 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995892 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995899 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995907 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995919 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995929 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995939 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995951 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
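
kubelet.go:313 and file.go:69 register /etc/kubernetes/manifests as the static pod source: manifests dropped into that directory become pods without the API server's involvement. A poll-based stand-in for that watcher (a sketch only; the real kubelet also reacts to inotify events, and 20s matches its documented default file-check frequency):

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    	"time"
    )

    func main() {
    	dir := "/etc/kubernetes/manifests"
    	seen := map[string]time.Time{}
    	for {
    		entries, err := os.ReadDir(dir)
    		if err != nil {
    			fmt.Fprintln(os.Stderr, "read:", err)
    		}
    		for _, e := range entries {
    			info, err := e.Info()
    			if err != nil || e.IsDir() {
    				continue
    			}
    			path := filepath.Join(dir, e.Name())
    			if prev, ok := seen[path]; !ok || !prev.Equal(info.ModTime()) {
    				fmt.Println("manifest added/updated:", path)
    				seen[path] = info.ModTime()
    			}
    		}
    		time.Sleep(20 * time.Second)
    	}
    }
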
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.995958 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.995923 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused
Dec 05 12:09:14 crc kubenswrapper[4711]: W1205 12:09:14.995932 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused
Dec 05 12:09:14 crc kubenswrapper[4711]: E1205 12:09:14.996025 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError"
Dec 05 12:09:14 crc kubenswrapper[4711]: E1205 12:09:14.996043 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.996243 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.996741 4711 server.go:1280] "Started kubelet"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.997110 4711 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.997246 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.997103 4711 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.998119 4711 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 12:09:14 crc systemd[1]: Started Kubernetes Kubelet.
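
The reflector failures above ("connection refused" against api-int.crc.testing:6443) do not stop startup: the kubelet keeps coming up and the informers retry their LIST until the API server answers. A stdlib sketch of that retry-with-backoff shape (client-go's real backoff lives in its reflector machinery; the dial here merely stands in for the LIST call):

    package main

    import (
    	"fmt"
    	"net"
    	"time"
    )

    // listOnce stands in for a reflector's initial LIST; it only checks
    // reachability of the endpoint seen in the log.
    func listOnce() error {
    	conn, err := net.DialTimeout("tcp", "api-int.crc.testing:6443", 2*time.Second)
    	if err != nil {
    		return err
    	}
    	return conn.Close()
    }

    func main() {
    	delay := 500 * time.Millisecond
    	for attempt := 1; ; attempt++ {
    		if err := listOnce(); err == nil {
    			fmt.Println("list succeeded")
    			return
    		} else {
    			fmt.Printf("attempt %d failed: %v; retrying in %s\n", attempt, err, delay)
    		}
    		time.Sleep(delay)
    		if delay < 30*time.Second {
    			delay *= 2 // cap the exponential growth
    		}
    	}
    }
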
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.999115 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.999310 4711 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.999469 4711 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.999492 4711 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 12:09:14 crc kubenswrapper[4711]: I1205 12:09:14.999567 4711 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:14.999428 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 20:11:40.621210247 +0000 UTC
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.000032 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 896h2m25.621183512s for next certificate rotation
Dec 05 12:09:15 crc kubenswrapper[4711]: W1205 12:09:15.000366 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused
Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.000447 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError"
Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.000826 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.002344 4711 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.002405 4711 factory.go:55] Registering systemd factory
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.002460 4711 factory.go:221] Registration of the systemd container factory successfully
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.003201 4711 factory.go:153] Registering CRI-O factory
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.003227 4711 factory.go:221] Registration of the crio container factory successfully
Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.003266 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="200ms"
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.003315 4711 factory.go:103] Registering Raw factory
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.003370 4711 manager.go:1196] Started watching for new ooms in manager
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.004426 4711 manager.go:319] Starting recovery of all containers
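
Unlike the client certificate, the kubelet-serving certificate's rotation deadline (2026-01-11) is still in the future, so the manager just waits; "Waiting 896h2m25.621183512s" is simply the deadline minus the current time. A two-line check of that arithmetic (the few-microsecond difference from the logged figure is just the instant the log line was computed):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	now, _ := time.Parse(time.RFC3339, "2025-12-05T12:09:15Z")
    	deadline, _ := time.Parse(time.RFC3339Nano, "2026-01-11T20:11:40.621210247Z")
    	fmt.Println(deadline.Sub(now)) // 896h2m25.621210247s
    }
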
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052587 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052664 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052675 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052692 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052709 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052721 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052771 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052784 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052797 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052816 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052827 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052837 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052849 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052864 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052877 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052887 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052897 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052906 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052921 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052931 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052943 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052972 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.052981 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.046531 4711 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.38:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e50721bae224e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:09:14.996703822 +0000 UTC m=+0.581026152,LastTimestamp:2025-12-05 12:09:14.996703822 +0000 UTC m=+0.581026152,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.053483 4711 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054369 4711 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054490 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054528 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054551 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054590 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054613 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054683 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054704 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054722 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054742 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054774 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054798 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054819 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054844 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054866 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054887 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054909 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054931 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054953 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.054974 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055001 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055024 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055060 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055084 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055109 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055132 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055154 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055176 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055201 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
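
These reconstruct.go:130 entries are the volume manager rebuilding its actual state from what it finds on disk under /var/lib/kubelet, before the API server is reachable; each mount is marked "uncertain" until it can be verified. A throwaway helper (not part of kubelet) for digesting a saved copy of this log, counting reconstructed volumes per pod UID:

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    )

    var re = regexp.MustCompile(`podName="([0-9a-f-]+)" volumeName="([^"]+)"`)

    func main() {
    	counts := map[string]int{}
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // journal lines can be long
    	for sc.Scan() {
    		if m := re.FindStringSubmatch(sc.Text()); m != nil {
    			counts[m[1]]++
    		}
    	}
    	for uid, n := range counts {
    		fmt.Printf("%s: %d volumes\n", uid, n)
    	}
    }
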
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055225 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055255 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055363 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055422 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055452 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055478 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055541 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055565 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055600 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055664 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055700 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055733 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055757 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055778 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055806 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055832 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055856 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055889 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055917 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055939 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.055976 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056004 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
"Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056059 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056084 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056113 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056155 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056188 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056214 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056239 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056273 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056298 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056323 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056348 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056348 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056383 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056437 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056463 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056530 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056568 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056592 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056617 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056646 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056682 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056709 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056731 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056776 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056800 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056830 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056865 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056936 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056961 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.056985 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057016 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057053 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057079 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057105 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057236 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057273 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057302 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057337 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057362 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057411 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057439 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057465 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057535 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057568 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057612 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057634 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057654 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057677 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057724 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057750 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057780 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057806 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057831 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057867 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057893 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057917 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057943 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.057970 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058000 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058022 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058055 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058091 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058151 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058178 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058203 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058237 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058272 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058297 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058323 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058405 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058433 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058455 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058479 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058517 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058539 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058562 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058586 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058607 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058629 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058657 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058679 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058700 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058766 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058796 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058829 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058858 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058893 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058916 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058941 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.058957 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059003 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059023 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059047 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059080 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059112 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059136 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059167 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059196 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059223 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059250 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059279 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059301 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059322 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059344 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059431 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059465 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059489 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059512 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059546 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059568 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059597 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059619 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059640 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059660 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059731 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059893 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059942 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.059965 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060008 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060028 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060073 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060093 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060170 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060192 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060212 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060231 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060250 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060270 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060292 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060313 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060489 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060525 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060671 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060694 4711 reconstruct.go:97] "Volume reconstruction finished" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.060707 4711 reconciler.go:26] "Reconciler: start to sync state" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.070933 4711 manager.go:324] Recovery completed Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.081037 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.083064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.083116 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.083128 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.084069 4711 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.084092 4711 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.084113 4711 state_mem.go:36] "Initialized new in-memory state store" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.101851 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.202321 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.204210 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="400ms" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.303167 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.403735 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.504902 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.605157 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.605744 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="800ms" Dec 05 12:09:15 crc kubenswrapper[4711]: 
E1205 12:09:15.705694 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.806840 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.907169 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:15 crc kubenswrapper[4711]: W1205 12:09:15.957006 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:15 crc kubenswrapper[4711]: E1205 12:09:15.957130 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:15 crc kubenswrapper[4711]: I1205 12:09:15.998839 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.008085 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.108368 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.208847 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: W1205 12:09:16.274768 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.274860 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.310022 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.406646 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="1.6s" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.410832 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.511835 4711 kubelet_node_status.go:503] "Error getting the current 
node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: W1205 12:09:16.587713 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.587907 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.612659 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.713004 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.814179 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.914854 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:16 crc kubenswrapper[4711]: I1205 12:09:16.953192 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 05 12:09:16 crc kubenswrapper[4711]: E1205 12:09:16.955532 4711 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:16 crc kubenswrapper[4711]: I1205 12:09:16.998727 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.014988 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.115277 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.215674 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.316051 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.417048 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.517359 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.617526 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" 
Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.718012 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.818840 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: E1205 12:09:17.919201 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:17 crc kubenswrapper[4711]: I1205 12:09:17.998793 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.007897 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="3.2s" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.020143 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.120510 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.221194 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.321957 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.422085 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.523095 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.623707 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: I1205 12:09:18.679006 4711 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 12:09:18 crc kubenswrapper[4711]: I1205 12:09:18.681736 4711 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 05 12:09:18 crc kubenswrapper[4711]: I1205 12:09:18.681825 4711 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 12:09:18 crc kubenswrapper[4711]: I1205 12:09:18.681880 4711 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.682114 4711 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.724597 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: W1205 12:09:18.738685 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.738801 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:18 crc kubenswrapper[4711]: W1205 12:09:18.758309 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.758511 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.782994 4711 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.825289 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: W1205 12:09:18.924686 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.924782 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.926181 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:18 crc kubenswrapper[4711]: I1205 12:09:18.939134 4711 policy_none.go:49] "None policy: Start" Dec 05 12:09:18 crc kubenswrapper[4711]: I1205 
12:09:18.940017 4711 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 12:09:18 crc kubenswrapper[4711]: I1205 12:09:18.940057 4711 state_mem.go:35] "Initializing new in-memory state store" Dec 05 12:09:18 crc kubenswrapper[4711]: E1205 12:09:18.983367 4711 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 05 12:09:18 crc kubenswrapper[4711]: I1205 12:09:18.998658 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.026715 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:19 crc kubenswrapper[4711]: W1205 12:09:19.102019 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.102138 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.127321 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.227759 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.253935 4711 manager.go:334] "Starting Device Plugin manager" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.254153 4711 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.254181 4711 server.go:79] "Starting device plugin registration server" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.254805 4711 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.254837 4711 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.254982 4711 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.255112 4711 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.255123 4711 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.263457 4711 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.355021 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.356676 4711 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.356911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.356925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.356956 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.357721 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.38:6443: connect: connection refused" node="crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.384339 4711 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.384506 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.386055 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.386096 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.386109 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.386244 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.386513 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.386557 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387110 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387176 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387206 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387282 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387292 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387433 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387785 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.387895 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.388933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.388997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.389020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.389218 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.389341 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.389417 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.389438 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.389685 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.389754 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.391802 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.391838 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.391850 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.392023 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.392043 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.392098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.392124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.392176 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.392241 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.393208 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.393262 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.393281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.393622 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.393718 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.394700 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.394747 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.394770 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.395422 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.395469 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.395481 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420518 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420565 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420590 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420608 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420627 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420643 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420659 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420688 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420723 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420755 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420774 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420787 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420802 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420830 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.420859 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 
12:09:19.522285 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522377 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522523 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522579 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522557 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522773 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522840 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522864 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522912 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.522948 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc 
kubenswrapper[4711]: I1205 12:09:19.522966 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523010 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523016 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523042 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523068 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523066 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523128 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523158 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523135 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523210 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod 
\"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523243 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523298 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523331 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523344 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523366 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523423 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523438 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523497 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523527 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.523627 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.558647 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.560653 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.560724 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.560744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.560782 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.561766 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.38:6443: connect: connection refused" node="crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.710255 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.739767 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: W1205 12:09:19.743594 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-4b6fa609cb64b8facaa1aefb827913ba814e405e3d1a3b74e0c597c6c092fd06 WatchSource:0}: Error finding container 4b6fa609cb64b8facaa1aefb827913ba814e405e3d1a3b74e0c597c6c092fd06: Status 404 returned error can't find the container with id 4b6fa609cb64b8facaa1aefb827913ba814e405e3d1a3b74e0c597c6c092fd06 Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.750167 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.775202 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.787353 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:19 crc kubenswrapper[4711]: W1205 12:09:19.831721 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-383fba87e80bcdd2703b40d9ebbf3cacee0fddc6716c0db00b55600cf51d1e27 WatchSource:0}: Error finding container 383fba87e80bcdd2703b40d9ebbf3cacee0fddc6716c0db00b55600cf51d1e27: Status 404 returned error can't find the container with id 383fba87e80bcdd2703b40d9ebbf3cacee0fddc6716c0db00b55600cf51d1e27 Dec 05 12:09:19 crc kubenswrapper[4711]: W1205 12:09:19.848520 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-f0f463ad7e4e247fa9283ca9920f042811be5c0bcae23fb064aa51fa91ac4411 WatchSource:0}: Error finding container f0f463ad7e4e247fa9283ca9920f042811be5c0bcae23fb064aa51fa91ac4411: Status 404 returned error can't find the container with id f0f463ad7e4e247fa9283ca9920f042811be5c0bcae23fb064aa51fa91ac4411 Dec 05 12:09:19 crc kubenswrapper[4711]: W1205 12:09:19.851098 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-b10afe18f1be78c4d7a8b7b0a32532d1fcbcd0d94d9a96a088dda4ac9d9805fb WatchSource:0}: Error finding container b10afe18f1be78c4d7a8b7b0a32532d1fcbcd0d94d9a96a088dda4ac9d9805fb: Status 404 returned error can't find the container with id b10afe18f1be78c4d7a8b7b0a32532d1fcbcd0d94d9a96a088dda4ac9d9805fb Dec 05 12:09:19 crc kubenswrapper[4711]: W1205 12:09:19.861686 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-5deb4462f6ca108fea26a047fc276bca9e56d0f694f12ac7a0c4bf189152107d WatchSource:0}: Error finding container 5deb4462f6ca108fea26a047fc276bca9e56d0f694f12ac7a0c4bf189152107d: Status 404 returned error can't find the container with id 5deb4462f6ca108fea26a047fc276bca9e56d0f694f12ac7a0c4bf189152107d Dec 05 12:09:19 crc kubenswrapper[4711]: W1205 12:09:19.912336 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.912513 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.961977 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.964177 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.964244 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.964266 4711 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.964306 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:09:19 crc kubenswrapper[4711]: E1205 12:09:19.964953 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.38:6443: connect: connection refused" node="crc" Dec 05 12:09:19 crc kubenswrapper[4711]: I1205 12:09:19.998624 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.690646 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f0f463ad7e4e247fa9283ca9920f042811be5c0bcae23fb064aa51fa91ac4411"} Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.692237 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"383fba87e80bcdd2703b40d9ebbf3cacee0fddc6716c0db00b55600cf51d1e27"} Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.693307 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4b6fa609cb64b8facaa1aefb827913ba814e405e3d1a3b74e0c597c6c092fd06"} Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.694642 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5deb4462f6ca108fea26a047fc276bca9e56d0f694f12ac7a0c4bf189152107d"} Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.696368 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b10afe18f1be78c4d7a8b7b0a32532d1fcbcd0d94d9a96a088dda4ac9d9805fb"} Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.765827 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.767560 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.767618 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.767631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 12:09:20.767665 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:09:20 crc kubenswrapper[4711]: E1205 12:09:20.768123 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.38:6443: connect: connection refused" node="crc" Dec 05 12:09:20 crc kubenswrapper[4711]: I1205 
12:09:20.998772 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.087870 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 05 12:09:21 crc kubenswrapper[4711]: E1205 12:09:21.089176 4711 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:21 crc kubenswrapper[4711]: E1205 12:09:21.209692 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="6.4s" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.705688 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac" exitCode=0 Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.705780 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac"} Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.705798 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.706797 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.706831 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.706840 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.707353 4711 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749" exitCode=0 Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.707420 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749"} Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.707523 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.708853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.708890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:21 crc 
kubenswrapper[4711]: I1205 12:09:21.708900 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.710316 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.710403 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e"} Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.710431 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f"} Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.711062 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.711086 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.711096 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.712110 4711 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038" exitCode=0 Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.712149 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038"} Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.712278 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.713079 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.713112 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.713125 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.715258 4711 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="5a3a663be5e2f0bea8ec1a0a593ece380683f187263f85f80d996e914aea4e48" exitCode=0 Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.715303 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"5a3a663be5e2f0bea8ec1a0a593ece380683f187263f85f80d996e914aea4e48"} Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.715461 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.716982 4711 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.717015 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.717024 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:21 crc kubenswrapper[4711]: W1205 12:09:21.963240 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:21 crc kubenswrapper[4711]: E1205 12:09:21.963715 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:21 crc kubenswrapper[4711]: I1205 12:09:21.999369 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.368751 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.376217 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.376261 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.376272 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.376304 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:09:22 crc kubenswrapper[4711]: E1205 12:09:22.376881 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.38:6443: connect: connection refused" node="crc" Dec 05 12:09:22 crc kubenswrapper[4711]: W1205 12:09:22.697339 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:22 crc kubenswrapper[4711]: E1205 12:09:22.697472 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.720325 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.720413 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.720419 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.721328 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.721375 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.721406 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.721849 4711 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686" exitCode=0 Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.721950 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.722047 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.723106 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.723130 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.723142 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.723540 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"7baf11a8cb36d32397e598bf52c94ff8718477a122ffb89b316b9eae2825b1ce"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.723633 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.724937 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.724964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.724973 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.727625 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.727664 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.727680 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.732880 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.732951 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.732964 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73"} Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.733015 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.734111 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.734152 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.734166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:22 crc kubenswrapper[4711]: W1205 12:09:22.786462 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:22 crc kubenswrapper[4711]: E1205 12:09:22.786588 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:22 crc kubenswrapper[4711]: W1205 12:09:22.929824 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 
05 12:09:22 crc kubenswrapper[4711]: E1205 12:09:22.930130 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.38:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:09:22 crc kubenswrapper[4711]: I1205 12:09:22.998533 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.38:6443: connect: connection refused Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.558609 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.744644 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391"} Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.744707 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50"} Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.744852 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.746054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.746097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.746110 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.746981 4711 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2" exitCode=0 Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.747032 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2"} Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.747113 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.747127 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.747165 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.747178 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.747197 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.748632 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.748668 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.748682 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749044 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749085 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749129 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749139 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749171 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:23 crc kubenswrapper[4711]: I1205 12:09:23.749192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.450330 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.753317 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687"} Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.753405 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575"} Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.753421 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be"} Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.753432 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787"} Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.753460 4711 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.753508 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.753460 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.754370 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.754421 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.754434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.754371 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.754626 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.754643 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.883750 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:24 crc kubenswrapper[4711]: I1205 12:09:24.889878 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.360898 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.450933 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.451112 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.452484 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.452551 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.452564 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.577801 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.579663 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.579743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.579768 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.579816 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.761014 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8"} Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.761093 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.761183 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.761186 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762111 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762151 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762162 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762420 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762474 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762499 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762438 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762545 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:25 crc kubenswrapper[4711]: I1205 12:09:25.762555 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.406549 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.763599 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.763725 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.763798 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.763890 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.764753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 
12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.764842 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.764882 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.765297 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.765331 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.765299 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.765377 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.765435 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:26 crc kubenswrapper[4711]: I1205 12:09:26.765345 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:27 crc kubenswrapper[4711]: I1205 12:09:27.450856 4711 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 12:09:27 crc kubenswrapper[4711]: I1205 12:09:27.451017 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 05 12:09:27 crc kubenswrapper[4711]: I1205 12:09:27.766058 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:09:27 crc kubenswrapper[4711]: I1205 12:09:27.767568 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:27 crc kubenswrapper[4711]: I1205 12:09:27.767602 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:27 crc kubenswrapper[4711]: I1205 12:09:27.767613 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:28 crc kubenswrapper[4711]: I1205 12:09:28.537992 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 12:09:28 crc kubenswrapper[4711]: I1205 12:09:28.538279 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:09:28 crc kubenswrapper[4711]: I1205 12:09:28.539935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:28 crc kubenswrapper[4711]: I1205 12:09:28.540149 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:28 crc kubenswrapper[4711]: I1205 12:09:28.540290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:29 crc kubenswrapper[4711]: E1205 12:09:29.263712 4711 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 05 12:09:29 crc kubenswrapper[4711]: I1205 12:09:29.775102 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 05 12:09:30 crc kubenswrapper[4711]: I1205 12:09:30.109673 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 05 12:09:30 crc kubenswrapper[4711]: I1205 12:09:30.109987 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:09:30 crc kubenswrapper[4711]: I1205 12:09:30.111421 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:30 crc kubenswrapper[4711]: I1205 12:09:30.111456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:30 crc kubenswrapper[4711]: I1205 12:09:30.111467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:31 crc kubenswrapper[4711]: I1205 12:09:31.627695 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 05 12:09:31 crc kubenswrapper[4711]: I1205 12:09:31.627990 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:09:31 crc kubenswrapper[4711]: I1205 12:09:31.629355 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:31 crc kubenswrapper[4711]: I1205 12:09:31.629415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:31 crc kubenswrapper[4711]: I1205 12:09:31.629439 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:34 crc kubenswrapper[4711]: I1205 12:09:34.000294 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Dec 05 12:09:34 crc kubenswrapper[4711]: E1205 12:09:34.722520 4711 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.187e50721bae224e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:09:14.996703822 +0000 UTC m=+0.581026152,LastTimestamp:2025-12-05 12:09:14.996703822 +0000 UTC m=+0.581026152,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 12:09:34 crc kubenswrapper[4711]: I1205 12:09:34.733092 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc
container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 05 12:09:34 crc kubenswrapper[4711]: I1205 12:09:34.733181 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 05 12:09:34 crc kubenswrapper[4711]: I1205 12:09:34.737645 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 05 12:09:34 crc kubenswrapper[4711]: I1205 12:09:34.737723 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.417103 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.417317 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.418597 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.418638 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.418649 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.421835 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.787236 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.788241 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.788295 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:36 crc kubenswrapper[4711]: I1205 12:09:36.788308 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:37 crc kubenswrapper[4711]: I1205 12:09:37.451762 4711 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 12:09:37 crc kubenswrapper[4711]: I1205 12:09:37.451993 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 05 12:09:38 crc kubenswrapper[4711]: I1205 12:09:38.543994 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 12:09:38 crc kubenswrapper[4711]: I1205 12:09:38.544176 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:09:38 crc kubenswrapper[4711]: I1205 12:09:38.545576 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:38 crc kubenswrapper[4711]: I1205 12:09:38.545639 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:38 crc kubenswrapper[4711]: I1205 12:09:38.545652 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:39 crc kubenswrapper[4711]: E1205 12:09:39.263964 4711 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 05 12:09:39 crc kubenswrapper[4711]: E1205 12:09:39.736867 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="7s"
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.745336 4711 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 12:09:39 crc kubenswrapper[4711]: E1205 12:09:39.748190 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.748237 4711 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.748277 4711 trace.go:236] Trace[490438822]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 12:09:27.570) (total time: 12177ms):
Dec 05 12:09:39 crc kubenswrapper[4711]: Trace[490438822]: ---"Objects listed" error: 12177ms (12:09:39.748)
Dec 05 12:09:39 crc kubenswrapper[4711]: Trace[490438822]: [12.177322645s] [12.177322645s] END
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.748303 4711 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.755739 4711 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.765317 4711 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.770054 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38372->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.770142 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38372->192.168.126.11:17697: read: connection reset by peer"
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.770514 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38380->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.770544 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38380->192.168.126.11:17697: read: connection reset by peer"
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.770762 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.770858 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.800348 4711 csr.go:261] certificate signing request csr-pdn2w is approved, waiting to be issued
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.807835 4711 csr.go:257] certificate signing request csr-pdn2w is issued
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.827251 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.829220 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391" exitCode=255
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.829278 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391"}
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.860858 4711 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 05 12:09:39 crc kubenswrapper[4711]: I1205 12:09:39.949172 4711 scope.go:117] "RemoveContainer" containerID="dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.005465 4711 apiserver.go:52] "Watching apiserver"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.009792 4711 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.010294 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc"]
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.010990 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.011095 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.011158 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.011170 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.011180 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.011726 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.011802 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.011823 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.012045 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.012946 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.015512 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.015751 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.015782 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.015891 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.015969 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.016120 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.016229 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.016235 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.044605 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.060004 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.074764 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.091088 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.100327 4711 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.106073 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.119520 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.134217 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.149880 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158207 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158260 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158285 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158304 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158326 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158343 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158359 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158379 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158415 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158432 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158452 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158468 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158489 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158506 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158522 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158540 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158557 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158578 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158597 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158614 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158661 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158677 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158695 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158712 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158749 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158768 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158784 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158763 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158817 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158932 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.158976 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159004 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159051 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159055 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159077 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159060 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159123 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159146 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159173 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159228 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159251 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159254 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159308 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159332 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159371 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159414 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159467 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159486 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159505 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159523 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159557 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159569 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159616 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159638 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159657 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159681 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159699 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159718 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159735 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159752 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159799 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159817 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159861 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159864 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159884 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159904 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159923 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159911 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.159943 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160024 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160031 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160051 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160080 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160101 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160117 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160117 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160173 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160171 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160181 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160192 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160215 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160233 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160253 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160263 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160270 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160304 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160329 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160350 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160367 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160369 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160409 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160449 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160497 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160520 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160537 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160567 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160587 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160610 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160621 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160628 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160649 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160668 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160678 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160704 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160727 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160743 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160750 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160788 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160809 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160808 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160819 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160826 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160857 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160857 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160877 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160894 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160951 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160968 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.160984 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 
12:09:40.161002 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161021 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161035 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161040 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161085 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161100 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161110 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161150 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161199 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161212 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161223 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161226 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161238 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161252 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161273 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161275 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161293 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161315 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161335 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161356 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161373 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161408 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161413 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" 
(OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161427 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161448 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161469 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161490 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161508 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161528 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161546 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161563 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161579 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161598 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161615 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161632 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161650 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161669 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161685 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161703 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161719 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161735 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161753 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161774 4711 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161792 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161808 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161824 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161841 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161858 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161877 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161894 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161913 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161929 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 
12:09:40.161945    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161962    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162006    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162025    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162043    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162062    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162157    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162178    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162198    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162220    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162241    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162261    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162281    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162301    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162321    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162341    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162364    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162414    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162432    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162454    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162472    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162490    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162511    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162530    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162546    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162565    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162584    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162604    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162689    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162723    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162742    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162759    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162776    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162794    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162812    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162829    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162848    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166826    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166869    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166891    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166920    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166959    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166983    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167014    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167044    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167070    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167092    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167113    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167144    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167163    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167187    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167217    4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167279    4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167312    4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167336    4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167361    4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167401    4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167423    4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167446    4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161445    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167463    4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167460    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161636    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161837    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162077    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162171    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162291    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162446    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162486    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162683    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163026    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163114    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.162935    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163199    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163601    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163691    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163695    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163816    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163834    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163874    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163913    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167564    4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.163925    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164061    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164113    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164292    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164313    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164352    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164446    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164471    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164655    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164665    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164699    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164792    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164840    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.164882    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.165819    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.165930    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166295    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166464    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166609    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166658    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166739    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.166866    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167078    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167176    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167298    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167421    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167824    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167843    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.167865    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.168233    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.168421    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.168463    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.168538    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.168815    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.168964    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.169499    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.169594    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.169676    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.169879    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170081    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170136    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170160    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170313    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170435    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170446    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170668    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170683    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170924    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.170958    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.171157    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.171245    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.171370    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.171562    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.171611    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.171696    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.171827    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.171954    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.172102    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.172129    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.172158    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.172210    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.172684    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.172962    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.173041    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.173071    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.173322    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.173818    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.174351    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.175117    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.175250    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.175523    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.175699    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.175764    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.175816    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.175873    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.176049    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.176346    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.176729    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.177157    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.177520    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.177635    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.178037    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.178188    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.178376    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.178761    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.179220    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.179258    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.179321    4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.179371 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.179485 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.179664 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.180130 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.180271 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.180331 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.180517 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.180608 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.180604 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.180878 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.181044 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.181077 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.181308 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.181428 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.181531 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.181725 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.182202 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.182464 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.182714 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.161610 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.182876 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.183092 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.183146 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). 
InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.183208 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.183331 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.183799 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.181680 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.184758 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.185048 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.185107 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.185321 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.185550 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.185627 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.185754 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.186205 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.186194 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.187184 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.187297 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.187877 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.187879 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.188305 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.184439 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.188445 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.188925 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.189526 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.190111 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.189711 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.190616 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.190661 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.191457 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:40.685914625 +0000 UTC m=+26.270236955 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.191511 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.191548 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.191574 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.191617 4711 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:40.691600244 +0000 UTC m=+26.275922704 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192084 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192140 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192272 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192289 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192304 4711 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192317 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192331 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192345 4711 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192356 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192371 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" 
Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192381 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192413 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192424 4711 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192436 4711 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192448 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192468 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192480 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192493 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192504 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192517 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192529 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192531 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192611 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192541 4711 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192669 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192682 4711 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192710 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192720 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192731 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192742 4711 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192754 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192764 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192773 4711 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192782 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192793 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192802 4711 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192812 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192821 4711 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192830 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192839 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192849 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192858 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192869 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192881 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192890 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192900 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192909 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192918 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: 
\"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192927 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192936 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192946 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192955 4711 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192964 4711 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192974 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.192989 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193000 4711 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193010 4711 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193021 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193030 4711 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193040 4711 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193049 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193059 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193068 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193078 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193086 4711 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193095 4711 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193104 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193112 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193121 4711 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193130 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193138 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193147 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193156 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193165 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: 
\"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193174 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193182 4711 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193192 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193200 4711 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193209 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193219 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193228 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193237 4711 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193248 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193258 4711 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193268 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193278 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193290 4711 reconciler_common.go:293] "Volume detached for volume 
\"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193301 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193315 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193327 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193338 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193348 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193359 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193372 4711 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193410 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193423 4711 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193434 4711 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193443 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193453 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193461 4711 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193470 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193480 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193489 4711 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193500 4711 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193510 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193519 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193529 4711 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193538 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193548 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193557 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193567 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193577 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193586 
4711 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193596 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193606 4711 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193615 4711 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193624 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193634 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193643 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193652 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193661 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193671 4711 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193680 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193688 4711 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193697 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193707 4711 reconciler_common.go:293] "Volume detached for volume 
\"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193717 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193725 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193734 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193745 4711 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193754 4711 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193763 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193780 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193789 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193798 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193808 4711 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193817 4711 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193828 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 
12:09:40.193838 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193848 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193884 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193894 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193903 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193913 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193923 4711 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193932 4711 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193941 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193951 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193960 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193969 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193978 4711 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193987 4711 reconciler_common.go:293] "Volume detached 
for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.193996 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194005 4711 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194014 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194024 4711 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194033 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194041 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194050 4711 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194058 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194068 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194077 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194086 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194095 4711 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194103 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194113 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194122 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194131 4711 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194139 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194148 4711 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194157 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194166 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194176 4711 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194187 4711 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194196 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194204 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194214 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194223 4711 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194233 4711 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194243 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194253 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194261 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194270 4711 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194278 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194288 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194296 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.194305 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.196542 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:09:40.696504396 +0000 UTC m=+26.280826806 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.197363 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.203099 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.203337 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.203856 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.204471 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.204985 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.207516 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.189686 4711 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.214269 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.214644 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.214663 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.214751 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:40.714723416 +0000 UTC m=+26.299045746 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.215497 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.216680 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.216867 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.216940 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.217086 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-05 12:09:40.717056706 +0000 UTC m=+26.301379126 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.218013 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.220234 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.226628 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.228899 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.232948 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.249509 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.252515 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.253490 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295380 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295483 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295531 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295542 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295520 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295552 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295639 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295657 4711 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295672 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295684 4711 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 
12:09:40.295612 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295696 4711 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295708 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295719 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.295730 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.332481 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.340068 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:09:40 crc kubenswrapper[4711]: W1205 12:09:40.344345 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-081918b10d521da0186ef46cda9eb308d36aeaf050b8b01b394e3757746ec8f7 WatchSource:0}: Error finding container 081918b10d521da0186ef46cda9eb308d36aeaf050b8b01b394e3757746ec8f7: Status 404 returned error can't find the container with id 081918b10d521da0186ef46cda9eb308d36aeaf050b8b01b394e3757746ec8f7 Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.347166 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:09:40 crc kubenswrapper[4711]: W1205 12:09:40.361569 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-787a6995db0bd9b815ccd830ecbf74ac202325334e8beaf4a1cca0307712392d WatchSource:0}: Error finding container 787a6995db0bd9b815ccd830ecbf74ac202325334e8beaf4a1cca0307712392d: Status 404 returned error can't find the container with id 787a6995db0bd9b815ccd830ecbf74ac202325334e8beaf4a1cca0307712392d Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.686308 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.687206 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.688591 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.689325 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.690669 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.691136 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.691720 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.692786 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.693364 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.694366 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.694918 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.696092 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.696831 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.697643 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.698244 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.699245 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.699380 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.699434 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.699511 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:09:41.6994681 +0000 UTC m=+27.283790430 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.699592 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.699601 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.699782 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:41.699762956 +0000 UTC m=+27.284085286 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.699805 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:41.699794946 +0000 UTC m=+27.284117276 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.699941 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.700808 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.701239 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.702087 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.703305 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.703817 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.705082 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.705660 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.706478 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.706868 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.708149 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.708874 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.709914 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.710482 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.711357 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.711998 4711 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.712100 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.715109 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.715909 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.716576 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.718943 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.720057 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.720597 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.721671 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.722538 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.723475 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.724085 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.725095 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.725720 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.726557 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.727061 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.727945 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.728683 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.729654 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.730100 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.730927 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.731462 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.736890 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.738037 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.800919 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:40 crc 
kubenswrapper[4711]: I1205 12:09:40.800992 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.801119 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.801138 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.801151 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.801119 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.801238 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.801251 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.801211 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:41.801189175 +0000 UTC m=+27.385511505 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:40 crc kubenswrapper[4711]: E1205 12:09:40.801305 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:41.801291837 +0000 UTC m=+27.385614177 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.809045 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-05 12:04:39 +0000 UTC, rotation deadline is 2026-10-29 18:44:44.580096608 +0000 UTC Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.809098 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7878h35m3.771001517s for next certificate rotation Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.834679 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.836264 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8"} Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.837431 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.839092 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa"} Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.839138 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"dd721e966069956115b0b5e26a44cf1368dd2b811a9f5827fad97164e7f54ab5"} Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.839943 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"787a6995db0bd9b815ccd830ecbf74ac202325334e8beaf4a1cca0307712392d"} Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.841196 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07"} Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.841227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"081918b10d521da0186ef46cda9eb308d36aeaf050b8b01b394e3757746ec8f7"} Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.863824 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.912121 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.928018 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.939185 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.955164 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.980027 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:40 crc kubenswrapper[4711]: I1205 12:09:40.995852 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.007746 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.021146 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.035607 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.051707 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.073868 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.087213 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.101821 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.660215 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.674930 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.678799 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.682218 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.682218 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.682347 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.682400 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.682418 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.682552 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.685995 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.701134 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.714071 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.714196 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.714234 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:09:43.714191319 +0000 UTC m=+29.298513689 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.714299 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.714332 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.714432 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:43.714409515 +0000 UTC m=+29.298731865 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.714517 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.714672 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-05 12:09:43.714641359 +0000 UTC m=+29.298963749 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.739522 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.759286 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.773751 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.801899 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.815120 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.815169 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.815323 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.815342 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.815353 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.815373 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.815433 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:43.815414136 +0000 UTC m=+29.399736466 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.815433 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.815455 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.815526 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:43.815501758 +0000 UTC m=+29.399824148 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.827223 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.849745 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8"} Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.858097 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.878455 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: E1205 12:09:41.879852 4711 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.937114 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:41 crc kubenswrapper[4711]: I1205 12:09:41.979786 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.030720 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.069144 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.100619 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.130243 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.180680 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\
",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"
state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.230179 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.276310 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-drklt"] Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.276747 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-r45dj"] Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.276994 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.277010 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-r45dj" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.281137 4711 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.281204 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.281250 4711 reflector.go:561] object-"openshift-dns"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.281262 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.281310 4711 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.281325 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.282433 4711 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.282460 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace 
\"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.282532 4711 reflector.go:561] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": failed to list *v1.Secret: secrets "node-resolver-dockercfg-kz9s7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.282544 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"node-resolver-dockercfg-kz9s7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-resolver-dockercfg-kz9s7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.282595 4711 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.282606 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.282638 4711 reflector.go:561] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.282649 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.283922 4711 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.283947 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace 
\"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.288831 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.314792 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.344063 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.374318 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.391482 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.408307 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.420758 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/47bc5fb6-f724-409c-9a04-3c5e50951dd0-mcd-auth-proxy-config\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.420815 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzl57\" (UniqueName: \"kubernetes.io/projected/cb3908e1-8749-46d7-a003-a9f7a8574715-kube-api-access-kzl57\") pod \"node-resolver-r45dj\" (UID: \"cb3908e1-8749-46d7-a003-a9f7a8574715\") " pod="openshift-dns/node-resolver-r45dj" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.420843 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/47bc5fb6-f724-409c-9a04-3c5e50951dd0-rootfs\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.421047 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/cb3908e1-8749-46d7-a003-a9f7a8574715-hosts-file\") pod \"node-resolver-r45dj\" (UID: \"cb3908e1-8749-46d7-a003-a9f7a8574715\") " pod="openshift-dns/node-resolver-r45dj" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.421103 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/47bc5fb6-f724-409c-9a04-3c5e50951dd0-proxy-tls\") pod 
\"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.421180 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjvmx\" (UniqueName: \"kubernetes.io/projected/47bc5fb6-f724-409c-9a04-3c5e50951dd0-kube-api-access-gjvmx\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.421723 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.441931 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.457545 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.471785 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.489052 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.505425 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.522259 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.522477 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjvmx\" (UniqueName: \"kubernetes.io/projected/47bc5fb6-f724-409c-9a04-3c5e50951dd0-kube-api-access-gjvmx\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 
12:09:42.522531 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/47bc5fb6-f724-409c-9a04-3c5e50951dd0-mcd-auth-proxy-config\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.522558 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzl57\" (UniqueName: \"kubernetes.io/projected/cb3908e1-8749-46d7-a003-a9f7a8574715-kube-api-access-kzl57\") pod \"node-resolver-r45dj\" (UID: \"cb3908e1-8749-46d7-a003-a9f7a8574715\") " pod="openshift-dns/node-resolver-r45dj" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.522586 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/47bc5fb6-f724-409c-9a04-3c5e50951dd0-rootfs\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.522623 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/cb3908e1-8749-46d7-a003-a9f7a8574715-hosts-file\") pod \"node-resolver-r45dj\" (UID: \"cb3908e1-8749-46d7-a003-a9f7a8574715\") " pod="openshift-dns/node-resolver-r45dj" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.522658 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/47bc5fb6-f724-409c-9a04-3c5e50951dd0-proxy-tls\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.522722 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/cb3908e1-8749-46d7-a003-a9f7a8574715-hosts-file\") pod \"node-resolver-r45dj\" (UID: \"cb3908e1-8749-46d7-a003-a9f7a8574715\") " pod="openshift-dns/node-resolver-r45dj" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.522723 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/47bc5fb6-f724-409c-9a04-3c5e50951dd0-rootfs\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.539577 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.556750 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.578614 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.711587 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-wgkqk"] Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.712112 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wgkqk" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.714619 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.715071 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.715294 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.715462 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.716620 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ndz5q"] Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.717408 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.718369 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-6jvvs"] Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.718949 4711 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": failed to list *v1.Secret: secrets "ovn-node-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.718985 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-node-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-node-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.718992 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.719182 4711 reflector.go:561] object-"openshift-ovn-kubernetes"/"env-overrides": failed to list *v1.ConfigMap: configmaps "env-overrides" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.719217 4711 reflector.go:561] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.719226 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"env-overrides\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"env-overrides\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.719231 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: W1205 12:09:42.719255 4711 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": failed to list *v1.ConfigMap: configmaps "ovnkube-script-lib" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:09:42 crc kubenswrapper[4711]: E1205 12:09:42.719267 4711 
reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovnkube-script-lib\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnkube-script-lib\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.722432 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.722480 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.722808 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.723457 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.723699 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.724019 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.729267 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.745968 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.766974 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.782044 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.804258 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.824530 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825156 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-kubelet\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825207 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0df94722-138f-4247-b308-3e3ccadc54b5-multus-daemon-config\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825251 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-netns\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825278 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825310 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-os-release\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825335 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-log-socket\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825357 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-os-release\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825378 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-hostroot\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825471 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpc4w\" (UniqueName: \"kubernetes.io/projected/0df94722-138f-4247-b308-3e3ccadc54b5-kube-api-access-tpc4w\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825497 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-systemd\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825517 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-bin\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825541 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-systemd-units\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825562 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-ovn-kubernetes\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825583 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-system-cni-dir\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825608 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-cnibin\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825630 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-slash\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825672 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825759 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-k8s-cni-cncf-io\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825812 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-node-log\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825838 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825869 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj2xq\" (UniqueName: \"kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825918 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-cni-multus\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825939 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-cnibin\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825960 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6debde47-b5cc-400a-b7fc-0419770d0bd4-cni-binary-copy\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.825997 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826046 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-cni-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826065 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6debde47-b5cc-400a-b7fc-0419770d0bd4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826101 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-config\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826120 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0df94722-138f-4247-b308-3e3ccadc54b5-cni-binary-copy\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826138 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-conf-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826165 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-socket-dir-parent\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826193 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-etc-kubernetes\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826222 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5pxw\" (UniqueName: \"kubernetes.io/projected/6debde47-b5cc-400a-b7fc-0419770d0bd4-kube-api-access-f5pxw\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826245 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-kubelet\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826273 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-netd\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826320 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-cni-bin\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826346 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-etc-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826371 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-ovn\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826426 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-system-cni-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826455 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-netns\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826494 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-multus-certs\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826516 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-var-lib-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826534 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.826552 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-script-lib\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.842915 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.857309 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.885831 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.915183 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:42Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929536 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-cni-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929595 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6debde47-b5cc-400a-b7fc-0419770d0bd4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929620 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-config\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929660 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-conf-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929687 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0df94722-138f-4247-b308-3e3ccadc54b5-cni-binary-copy\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929705 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-etc-kubernetes\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929726 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5pxw\" (UniqueName: \"kubernetes.io/projected/6debde47-b5cc-400a-b7fc-0419770d0bd4-kube-api-access-f5pxw\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929747 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-kubelet\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929766 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-netd\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929802 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-socket-dir-parent\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929823 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-cni-bin\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929845 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-etc-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929868 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-ovn\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929910 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-netns\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929932 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-multus-certs\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929958 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-var-lib-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929978 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.929998 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-script-lib\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930034 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-system-cni-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930078 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-kubelet\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0df94722-138f-4247-b308-3e3ccadc54b5-multus-daemon-config\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930120 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-netns\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930143 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930176 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-os-release\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930197 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-log-socket\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930218 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-os-release\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930226 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-cni-bin\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930239 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-hostroot\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930288 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-hostroot\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930331 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpc4w\" (UniqueName: \"kubernetes.io/projected/0df94722-138f-4247-b308-3e3ccadc54b5-kube-api-access-tpc4w\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930353 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-systemd\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930378 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-bin\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930415 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-netns\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930436 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-ovn-kubernetes\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930442 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-multus-certs\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930455 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-system-cni-dir\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930472 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-var-lib-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930475 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-cnibin\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930499 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-cnibin\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930508 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-systemd-units\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930533 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930553 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-slash\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930573 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-node-log\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930594 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930631 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-k8s-cni-cncf-io\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930666 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-cni-multus\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930691 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj2xq\" (UniqueName: \"kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930715 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6debde47-b5cc-400a-b7fc-0419770d0bd4-cni-binary-copy\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930736 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930758 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-cnibin\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930791 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-cni-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.931095 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-system-cni-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.931494 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-kubelet\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.931498 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6debde47-b5cc-400a-b7fc-0419770d0bd4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932042 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-config\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932169 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0df94722-138f-4247-b308-3e3ccadc54b5-multus-daemon-config\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932222 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-netns\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932252 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932347 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-conf-dir\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932525 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-os-release\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932564 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-log-socket\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932606 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-os-release\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932633 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-systemd-units\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932863 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0df94722-138f-4247-b308-3e3ccadc54b5-cni-binary-copy\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932922 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-etc-kubernetes\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932948 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.932981 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-slash\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933011 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-node-log\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933327 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-run-k8s-cni-cncf-io\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933354 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-kubelet\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933368 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-host-var-lib-cni-multus\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933419 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-netd\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933426 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-multus-socket-dir-parent\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933590 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-systemd\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930357 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-etc-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933645 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-bin\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.930379 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-ovn\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933686 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-ovn-kubernetes\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933718 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6debde47-b5cc-400a-b7fc-0419770d0bd4-system-cni-dir\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933755 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-openvswitch\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.933800 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0df94722-138f-4247-b308-3e3ccadc54b5-cnibin\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.934181 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6debde47-b5cc-400a-b7fc-0419770d0bd4-cni-binary-copy\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.973270 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpc4w\" (UniqueName: \"kubernetes.io/projected/0df94722-138f-4247-b308-3e3ccadc54b5-kube-api-access-tpc4w\") pod \"multus-wgkqk\" (UID: \"0df94722-138f-4247-b308-3e3ccadc54b5\") " pod="openshift-multus/multus-wgkqk" Dec 05 12:09:42 crc kubenswrapper[4711]: I1205 12:09:42.988733 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5pxw\" (UniqueName: 
\"kubernetes.io/projected/6debde47-b5cc-400a-b7fc-0419770d0bd4-kube-api-access-f5pxw\") pod \"multus-additional-cni-plugins-6jvvs\" (UID: \"6debde47-b5cc-400a-b7fc-0419770d0bd4\") " pod="openshift-multus/multus-additional-cni-plugins-6jvvs" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.035708 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wgkqk" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.039217 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\
\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.046805 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" Dec 05 12:09:43 crc kubenswrapper[4711]: W1205 12:09:43.051944 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0df94722_138f_4247_b308_3e3ccadc54b5.slice/crio-b54cef2572339c13ea12850d42f71da4608c5fe040d8d764d45f5a14b5e47840 WatchSource:0}: Error finding container b54cef2572339c13ea12850d42f71da4608c5fe040d8d764d45f5a14b5e47840: Status 404 returned error can't find the container with id b54cef2572339c13ea12850d42f71da4608c5fe040d8d764d45f5a14b5e47840 Dec 05 12:09:43 crc kubenswrapper[4711]: W1205 12:09:43.077527 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6debde47_b5cc_400a_b7fc_0419770d0bd4.slice/crio-a956904e7bcee0871dc060e1bf4cfbd22fc7635f0e31141e9fbf74d793b74e3b WatchSource:0}: Error finding container a956904e7bcee0871dc060e1bf4cfbd22fc7635f0e31141e9fbf74d793b74e3b: Status 404 returned error can't find the container with id a956904e7bcee0871dc060e1bf4cfbd22fc7635f0e31141e9fbf74d793b74e3b Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.093955 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.114676 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.132799 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.161201 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.183979 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfa
af2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.198846 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.215669 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.233304 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.254080 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.269470 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.286828 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.289072 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.311465 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: 
I1205 12:09:43.328259 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\
\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.368442 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.480825 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.522932 4711 secret.go:188] Couldn't get secret openshift-machine-config-operator/proxy-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.523013 4711 configmap.go:193] Couldn't get configMap openshift-machine-config-operator/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.523065 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/47bc5fb6-f724-409c-9a04-3c5e50951dd0-proxy-tls podName:47bc5fb6-f724-409c-9a04-3c5e50951dd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:44.023038597 +0000 UTC m=+29.607360917 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/47bc5fb6-f724-409c-9a04-3c5e50951dd0-proxy-tls") pod "machine-config-daemon-drklt" (UID: "47bc5fb6-f724-409c-9a04-3c5e50951dd0") : failed to sync secret cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.523134 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/47bc5fb6-f724-409c-9a04-3c5e50951dd0-mcd-auth-proxy-config podName:47bc5fb6-f724-409c-9a04-3c5e50951dd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:44.023107159 +0000 UTC m=+29.607429559 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "mcd-auth-proxy-config" (UniqueName: "kubernetes.io/configmap/47bc5fb6-f724-409c-9a04-3c5e50951dd0-mcd-auth-proxy-config") pod "machine-config-daemon-drklt" (UID: "47bc5fb6-f724-409c-9a04-3c5e50951dd0") : failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.541581 4711 projected.go:288] Couldn't get configMap openshift-machine-config-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.598021 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.682098 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.682284 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.682372 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.682455 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.682514 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.682572 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.741109 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.741286 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:09:47.741259648 +0000 UTC m=+33.325581978 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.741336 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.741401 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.741478 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.741501 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.741531 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:47.741524294 +0000 UTC m=+33.325846624 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.741542 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:47.741537165 +0000 UTC m=+33.325859495 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.742239 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.745493 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.751810 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-script-lib\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.842024 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.842108 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.842269 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.842287 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.842298 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.842352 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:47.842336064 +0000 UTC m=+33.426658394 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.842417 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.842428 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.842434 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.842456 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:47.842447337 +0000 UTC m=+33.426769657 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.851629 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.851903 4711 projected.go:194] Error preparing data for projected volume kube-api-access-gjvmx for pod openshift-machine-config-operator/machine-config-daemon-drklt: failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.852005 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/47bc5fb6-f724-409c-9a04-3c5e50951dd0-kube-api-access-gjvmx podName:47bc5fb6-f724-409c-9a04-3c5e50951dd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:44.351976927 +0000 UTC m=+29.936299307 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-gjvmx" (UniqueName: "kubernetes.io/projected/47bc5fb6-f724-409c-9a04-3c5e50951dd0-kube-api-access-gjvmx") pod "machine-config-daemon-drklt" (UID: "47bc5fb6-f724-409c-9a04-3c5e50951dd0") : failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.856280 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86"} Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.856296 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.858232 4711 generic.go:334] "Generic (PLEG): container finished" podID="6debde47-b5cc-400a-b7fc-0419770d0bd4" containerID="0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2" exitCode=0 Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.858322 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerDied","Data":"0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2"} Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.858353 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerStarted","Data":"a956904e7bcee0871dc060e1bf4cfbd22fc7635f0e31141e9fbf74d793b74e3b"} Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.862137 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgkqk" event={"ID":"0df94722-138f-4247-b308-3e3ccadc54b5","Type":"ContainerStarted","Data":"76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019"} Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.862185 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgkqk" event={"ID":"0df94722-138f-4247-b308-3e3ccadc54b5","Type":"ContainerStarted","Data":"b54cef2572339c13ea12850d42f71da4608c5fe040d8d764d45f5a14b5e47840"} Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.877304 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.882420 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzl57\" (UniqueName: \"kubernetes.io/projected/cb3908e1-8749-46d7-a003-a9f7a8574715-kube-api-access-kzl57\") pod \"node-resolver-r45dj\" (UID: \"cb3908e1-8749-46d7-a003-a9f7a8574715\") " pod="openshift-dns/node-resolver-r45dj" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.882916 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.900961 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.930992 4711 configmap.go:193] Couldn't get configMap openshift-ovn-kubernetes/env-overrides: failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.931135 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides podName:3bce8b78-05d7-4003-9231-24d2e07f0c2a nodeName:}" failed. No retries permitted until 2025-12-05 12:09:44.431099333 +0000 UTC m=+30.015421663 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "env-overrides" (UniqueName: "kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides") pod "ovnkube-node-ndz5q" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a") : failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.933337 4711 secret.go:188] Couldn't get secret openshift-ovn-kubernetes/ovn-node-metrics-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.933490 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert podName:3bce8b78-05d7-4003-9231-24d2e07f0c2a nodeName:}" failed. 
No retries permitted until 2025-12-05 12:09:44.433456169 +0000 UTC m=+30.017778569 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-node-metrics-cert" (UniqueName: "kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert") pod "ovnkube-node-ndz5q" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a") : failed to sync secret cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.933554 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.951170 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.966126 4711 projected.go:288] Couldn't get configMap openshift-ovn-kubernetes/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.966177 4711 projected.go:194] Error preparing data for projected volume kube-api-access-fj2xq for pod openshift-ovn-kubernetes/ovnkube-node-ndz5q: failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: E1205 12:09:43.966285 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq podName:3bce8b78-05d7-4003-9231-24d2e07f0c2a nodeName:}" failed. No retries permitted until 2025-12-05 12:09:44.466251683 +0000 UTC m=+30.050574073 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-fj2xq" (UniqueName: "kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq") pod "ovnkube-node-ndz5q" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a") : failed to sync configmap cache: timed out waiting for the condition Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.968043 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.984492 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:43 crc kubenswrapper[4711]: I1205 12:09:43.998285 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.012687 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.031654 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.043132 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/47bc5fb6-f724-409c-9a04-3c5e50951dd0-mcd-auth-proxy-config\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.043184 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/47bc5fb6-f724-409c-9a04-3c5e50951dd0-proxy-tls\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.044000 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/47bc5fb6-f724-409c-9a04-3c5e50951dd0-mcd-auth-proxy-config\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.052597 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/47bc5fb6-f724-409c-9a04-3c5e50951dd0-proxy-tls\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.054339 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.071850 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.086538 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.096030 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-r45dj" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.100041 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.112411 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb3908e1_8749_46d7_a003_a9f7a8574715.slice/crio-61ea3386b999334a672c714fc0226e5765c5c090ebbba3c15a93ef8762e8f124 WatchSource:0}: Error finding container 61ea3386b999334a672c714fc0226e5765c5c090ebbba3c15a93ef8762e8f124: Status 404 returned error can't find the container with id 61ea3386b999334a672c714fc0226e5765c5c090ebbba3c15a93ef8762e8f124 Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.118916 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.144224 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.157933 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.183164 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.201642 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.218048 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.225658 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.244027 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: 
I1205 12:09:44.251900 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.269955 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{
\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.278193 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-k95n8"] Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.282873 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.288121 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.288126 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.288258 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.288323 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.288348 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.293203 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\
\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.310577 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.328216 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.344961 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.345628 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4fd49ea4-f954-4aed-969e-2f913b5172b6-serviceca\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.345655 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4fd49ea4-f954-4aed-969e-2f913b5172b6-host\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.345701 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjcl4\" (UniqueName: \"kubernetes.io/projected/4fd49ea4-f954-4aed-969e-2f913b5172b6-kube-api-access-rjcl4\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.364971 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.380107 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.398450 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.413682 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.431891 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.446674 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjcl4\" (UniqueName: \"kubernetes.io/projected/4fd49ea4-f954-4aed-969e-2f913b5172b6-kube-api-access-rjcl4\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.446717 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjvmx\" (UniqueName: \"kubernetes.io/projected/47bc5fb6-f724-409c-9a04-3c5e50951dd0-kube-api-access-gjvmx\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.446757 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.446775 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4fd49ea4-f954-4aed-969e-2f913b5172b6-serviceca\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.446791 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4fd49ea4-f954-4aed-969e-2f913b5172b6-host\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.446820 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.447758 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides\") pod \"ovnkube-node-ndz5q\" (UID: 
\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.448295 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4fd49ea4-f954-4aed-969e-2f913b5172b6-host\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.448565 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4fd49ea4-f954-4aed-969e-2f913b5172b6-serviceca\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.450083 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.450903 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjvmx\" (UniqueName: \"kubernetes.io/projected/47bc5fb6-f724-409c-9a04-3c5e50951dd0-kube-api-access-gjvmx\") pod \"machine-config-daemon-drklt\" (UID: \"47bc5fb6-f724-409c-9a04-3c5e50951dd0\") " pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.451855 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.454950 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.460205 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.465972 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.470932 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.472404 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjcl4\" (UniqueName: \"kubernetes.io/projected/4fd49ea4-f954-4aed-969e-2f913b5172b6-kube-api-access-rjcl4\") pod \"node-ca-k95n8\" (UID: \"4fd49ea4-f954-4aed-969e-2f913b5172b6\") " pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.497762 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.528804 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.548020 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj2xq\" (UniqueName: \"kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.551635 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.555082 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj2xq\" (UniqueName: \"kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq\") pod \"ovnkube-node-ndz5q\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.577625 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.594183 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc 
kubenswrapper[4711]: I1205 12:09:44.599045 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-k95n8" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.609963 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.681373 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.690448 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.754217 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.799470 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.827667 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.839929 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.848478 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.855656 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bce8b78_05d7_4003_9231_24d2e07f0c2a.slice/crio-22c63587e5f3c11aed126725cd14775a67729e8054733e771e47db7679f65768 WatchSource:0}: Error finding container 22c63587e5f3c11aed126725cd14775a67729e8054733e771e47db7679f65768: Status 404 returned error can't find the container with id 22c63587e5f3c11aed126725cd14775a67729e8054733e771e47db7679f65768 Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.863784 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.866214 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"22c63587e5f3c11aed126725cd14775a67729e8054733e771e47db7679f65768"} Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.870564 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"a613f655b710c4778d53a3971d1a5c6200142d9adeb0769ae76c3b392fd8f0cf"} Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.870912 4711 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.871661 4711 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.871794 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-console/pods/networking-console-plugin-85b44fc459-gdk6g/status\": read tcp 38.129.56.38:42450->38.129.56.38:6443: use of closed network connection" Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.871899 4711 reflector.go:484] object-"openshift-image-registry"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.871948 4711 reflector.go:484] object-"openshift-ovn-kubernetes"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.872343 4711 reflector.go:484] object-"openshift-image-registry"/"node-ca-dockercfg-4777p": watch of *v1.Secret ended with: very short watch: object-"openshift-image-registry"/"node-ca-dockercfg-4777p": Unexpected watch close - watch lasted less than a second and no items received Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.872489 4711 reflector.go:484] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items 
received Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.872680 4711 reflector.go:484] object-"openshift-image-registry"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.872739 4711 reflector.go:484] object-"openshift-dns"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 05 12:09:44 crc kubenswrapper[4711]: W1205 12:09:44.872936 4711 reflector.go:484] object-"openshift-image-registry"/"image-registry-certificates": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"image-registry-certificates": Unexpected watch close - watch lasted less than a second and no items received Dec 05 12:09:44 crc kubenswrapper[4711]: E1205 12:09:44.872859 4711 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events/crc.187e507220d4c595\": read tcp 38.129.56.38:42450->38.129.56.38:6443: use of closed network connection" event="&Event{ObjectMeta:{crc.187e507220d4c595 default 26615 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:09:15 +0000 UTC,LastTimestamp:2025-12-05 12:09:19.387288329 +0000 UTC m=+4.971610659,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.889005 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerStarted","Data":"cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37"} Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.895354 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-k95n8" event={"ID":"4fd49ea4-f954-4aed-969e-2f913b5172b6","Type":"ContainerStarted","Data":"226e3a879e3e2e7b02a67ceb42795936388cae36d07561fd89e037d0dbd550e4"} Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.900173 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-r45dj" event={"ID":"cb3908e1-8749-46d7-a003-a9f7a8574715","Type":"ContainerStarted","Data":"95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d"} Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.900242 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-r45dj" event={"ID":"cb3908e1-8749-46d7-a003-a9f7a8574715","Type":"ContainerStarted","Data":"61ea3386b999334a672c714fc0226e5765c5c090ebbba3c15a93ef8762e8f124"} Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.909680 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.928718 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc 
kubenswrapper[4711]: I1205 12:09:44.957339 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:44 crc kubenswrapper[4711]: I1205 12:09:44.978442 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready 
status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.007243 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.026500 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.049524 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.062708 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.079179 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.094023 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.135734 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.180269 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: 
I1205 12:09:45.215095 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\
\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/o
pt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.255226 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.293206 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.333379 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: 
I1205 12:09:45.374497 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.414591 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.463808 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.494156 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.533345 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.577033 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.619889 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.653701 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.682556 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:45 crc kubenswrapper[4711]: E1205 12:09:45.682718 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.682780 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:45 crc kubenswrapper[4711]: E1205 12:09:45.682821 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.682867 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:45 crc kubenswrapper[4711]: E1205 12:09:45.682911 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.694016 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.888865 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.909495 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18" exitCode=0 Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.909583 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18"} Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.911490 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e"} Dec 05 
12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.911562 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4"} Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.914435 4711 generic.go:334] "Generic (PLEG): container finished" podID="6debde47-b5cc-400a-b7fc-0419770d0bd4" containerID="cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37" exitCode=0 Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.914630 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerDied","Data":"cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37"} Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.916295 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-k95n8" event={"ID":"4fd49ea4-f954-4aed-969e-2f913b5172b6","Type":"ContainerStarted","Data":"76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36"} Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.932606 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host
-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.946971 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.966088 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.983363 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:45 crc kubenswrapper[4711]: I1205 12:09:45.998227 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.012135 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.036554 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfa
af2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.051443 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.072885 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.122001 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.149119 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.151241 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.151298 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.151310 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.151456 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.153053 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.202565 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.223164 4711 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.223521 4711 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.225426 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.225455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.225465 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.225481 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.225492 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:46 crc kubenswrapper[4711]: E1205 12:09:46.246302 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.250098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.250159 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.250174 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.250197 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.250211 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.255691 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: E1205 12:09:46.262913 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.266268 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.266306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.266315 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.266330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.266340 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: E1205 12:09:46.278987 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.283426 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.283488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.283503 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.283521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.283532 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.292180 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: E1205 12:09:46.297412 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.301168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.301212 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.301225 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.301244 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.301256 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: E1205 12:09:46.312518 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: E1205 12:09:46.312660 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.314111 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.314167 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.314183 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.314208 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.314224 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.332751 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.376903 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/ru
n/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mount
Path\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.412325 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.417139 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.417214 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.417238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.417301 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.417324 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.422662 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.443257 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.494344 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.519617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.519669 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.519682 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.519702 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.519713 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.532245 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.572525 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.617094 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.625789 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.625836 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.625849 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.625868 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.625880 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.653167 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.698952 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.728665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.728787 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.728796 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.728837 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.728856 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.735991 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.742588 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.792370 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.833887 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.835826 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.835876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.835887 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.835911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.835922 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.874968 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.914853 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.925747 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.925792 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.925805 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.929419 4711 generic.go:334] "Generic (PLEG): container finished" podID="6debde47-b5cc-400a-b7fc-0419770d0bd4" containerID="530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41" exitCode=0 Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.929903 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" 
event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerDied","Data":"530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.939260 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.939407 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.939478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.939548 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.939628 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:46Z","lastTransitionTime":"2025-12-05T12:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:46 crc kubenswrapper[4711]: I1205 12:09:46.958096 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732
cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.003686 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.006413 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:46Z 
is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.042722 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.042780 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.042790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.042812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.042824 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.055829 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.094775 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\
"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.130108 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is 
after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.146181 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.146273 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.146291 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.146313 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.146328 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.174486 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.212323 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.249525 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.249579 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.249594 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.249615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.249632 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.253062 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.291465 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.340407 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.352866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.352907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.352917 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.352937 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.352949 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.373752 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.416034 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.455919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.455959 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.455970 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.455987 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.455999 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.462230 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.491956 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.532879 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.558434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.558482 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.558491 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.558517 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.558536 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.572784 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.619328 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.653766 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.661551 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.661594 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.661605 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.661624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.661635 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.682886 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.682970 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.682886 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.683058 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.683158 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.683246 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.792174 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.792412 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:09:55.792350201 +0000 UTC m=+41.376672531 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.792535 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.792692 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.792883 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.792878 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.793125 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-05 12:09:55.792949815 +0000 UTC m=+41.377272145 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.793161 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:55.79313898 +0000 UTC m=+41.377461310 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.801341 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.801379 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.801408 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.801427 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.801440 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.893412 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.893476 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.893638 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.893656 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.893671 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.893673 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.893735 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.893759 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.893738 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:55.893719714 +0000 UTC m=+41.478042034 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:47 crc kubenswrapper[4711]: E1205 12:09:47.893864 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:55.893838397 +0000 UTC m=+41.478160727 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.904417 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.904460 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.904471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.904489 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.904500 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:47Z","lastTransitionTime":"2025-12-05T12:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.936145 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.936200 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.936211 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.939135 4711 generic.go:334] "Generic (PLEG): container finished" podID="6debde47-b5cc-400a-b7fc-0419770d0bd4" containerID="addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513" exitCode=0 Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.939163 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerDied","Data":"addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513"} Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.959287 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.972550 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 12:09:47 crc kubenswrapper[4711]: I1205 12:09:47.987635 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.000047 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.008483 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.008530 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.008544 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.008566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.008577 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.014231 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.031218 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.053489 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.102901 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.110899 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.110934 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.110943 4711 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.110959 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.110971 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.126032 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.147298 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.161490 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.178053 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.190800 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.215278 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.216903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.216943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.216955 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.216974 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.216985 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.234510 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:
09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.264698 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.291618 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.320218 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.320275 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.320287 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc 
kubenswrapper[4711]: I1205 12:09:48.320306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.320318 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.422950 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.423032 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.423050 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.423080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.423090 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.526761 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.526814 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.526827 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.526852 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.526864 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.628972 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.629011 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.629020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.629036 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.629046 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.700422 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.714921 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.731618 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.731665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.731676 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.731695 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.731705 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.732217 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.747753 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.770586 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.788434 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.805218 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.821136 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.834470 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.834509 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.834520 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.834538 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.834552 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.839742 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.859255 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.875561 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.896191 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.917897 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.931549 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8
s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.936450 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.936479 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.936489 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.936506 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.936515 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:48Z","lastTransitionTime":"2025-12-05T12:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.945676 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.948041 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerStarted","Data":"5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253"} Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.966788 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:48 crc kubenswrapper[4711]: I1205 12:09:48.981278 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.012462 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.039039 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.039085 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.039094 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.039109 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.039121 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.052645 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.098446 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.132605 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.142205 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.142256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.142269 4711 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.142288 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.142301 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.175210 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.213335 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.245713 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.245769 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.245784 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.245807 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.245820 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.252592 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.296141 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.331823 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.350505 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.350561 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.350575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.350597 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.350610 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.373054 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.419255 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.451622 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.453153 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.453200 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.453212 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.453230 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.453242 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.491633 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.556151 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.556239 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.556251 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.556270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.556302 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.658474 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.658521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.658530 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.658544 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.658554 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.683004 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.683067 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:49 crc kubenswrapper[4711]: E1205 12:09:49.683153 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.683202 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:49 crc kubenswrapper[4711]: E1205 12:09:49.683219 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:09:49 crc kubenswrapper[4711]: E1205 12:09:49.683246 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.761960 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.762005 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.762020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.762037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.762050 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.864978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.865027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.865038 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.865057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.865068 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.967179 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.967249 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.967261 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.967282 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:49 crc kubenswrapper[4711]: I1205 12:09:49.967296 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:49Z","lastTransitionTime":"2025-12-05T12:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.070358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.070422 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.070432 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.070450 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.070460 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.103335 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.126336 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPa
th\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"
192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.146240 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.163775 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.173415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.173468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.173480 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.173521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.173534 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.180033 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.194344 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.207866 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.224754 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.244310 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.260268 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.276184 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.276247 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.276260 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.276280 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.276345 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.276371 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.290262 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.379520 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.379563 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.379574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.379593 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.379605 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.453439 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
5T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1b
e6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.467739 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.482757 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.482812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.482824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.482843 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.482855 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.482760 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.495772 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:50Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.585318 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.585373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.585402 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.585425 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.585437 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.688834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.688935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.688965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.689039 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.689070 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.791756 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.791790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.791799 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.791815 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.791827 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.894429 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.894505 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.894525 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.894547 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.894585 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.996833 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.996879 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.996887 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.996903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:50 crc kubenswrapper[4711]: I1205 12:09:50.996913 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:50Z","lastTransitionTime":"2025-12-05T12:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.100156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.100204 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.100215 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.100238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.100255 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.203461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.203521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.203532 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.203565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.203577 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.305952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.305993 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.306001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.306037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.306047 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.408444 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.408501 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.408517 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.408542 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.408569 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.511309 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.511361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.511376 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.511420 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.511438 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.615035 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.615094 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.615107 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.615129 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.615143 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.683005 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.683020 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:09:51 crc kubenswrapper[4711]: E1205 12:09:51.683176 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.683021 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:09:51 crc kubenswrapper[4711]: E1205 12:09:51.683354 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:09:51 crc kubenswrapper[4711]: E1205 12:09:51.683375 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.718181 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.718240 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.718257 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.718282 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.718302 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.821121 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.821179 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.821192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.821211 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.821224 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.923804 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.923866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.923882 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.923902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:51 crc kubenswrapper[4711]: I1205 12:09:51.923918 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:51Z","lastTransitionTime":"2025-12-05T12:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.026042 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.026091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.026108 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.026130 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.026141 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.127991 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.128035 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.128045 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.128061 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.128071 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.231006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.231068 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.231083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.231106 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.231118 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.333033 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.333088 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.333097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.333118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.333133 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.435279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.435331 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.435339 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.435357 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.435367 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.538592 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.538640 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.538650 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.538668 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.538682 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.641653 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.641765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.641792 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.641826 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.641850 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.745194 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.745250 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.745263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.745284 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.745298 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.848289 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.848331 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.848344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.848368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.848381 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.951488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.951540 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.951549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.951569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:52 crc kubenswrapper[4711]: I1205 12:09:52.951579 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:52Z","lastTransitionTime":"2025-12-05T12:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.054700 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.054743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.054752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.054776 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.054785 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.157430 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.157477 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.157487 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.157502 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.157512 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.260051 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.260103 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.260118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.260140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.260153 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.362950 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.362991 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.363000 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.363018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.363027 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.466337 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.466434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.466447 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.466465 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.466478 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.569352 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.569416 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.569428 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.569445 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.569459 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.672294 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.672343 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.672353 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.672369 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.672379 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.682929 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.683032 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:09:53 crc kubenswrapper[4711]: E1205 12:09:53.683067 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.683123 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:09:53 crc kubenswrapper[4711]: E1205 12:09:53.683215 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:09:53 crc kubenswrapper[4711]: E1205 12:09:53.683455 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.775599 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.775716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.775744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.775782 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.775807 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.879492 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.879590 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.879624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.879650 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.879667 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.983079 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.983138 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.983152 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.983171 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:53 crc kubenswrapper[4711]: I1205 12:09:53.983183 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:53Z","lastTransitionTime":"2025-12-05T12:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.086785 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.086860 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.086884 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.086971 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.086995 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.190006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.190082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.190094 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.190114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.190149 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.292905 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.292981 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.292998 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.293022 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.293039 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.395990 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.396039 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.396051 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.396067 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.396078 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.502621 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.502726 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.502747 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.502998 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.503024 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.605805 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.605890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.605913 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.605943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.605961 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.708590 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.708660 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.708700 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.708731 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.708751 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.811954 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.811998 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.812010 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.812030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.812042 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.914919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.914969 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.914980 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.914997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.915007 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:54Z","lastTransitionTime":"2025-12-05T12:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.971507 4711 generic.go:334] "Generic (PLEG): container finished" podID="6debde47-b5cc-400a-b7fc-0419770d0bd4" containerID="5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253" exitCode=0
Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.971562 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerDied","Data":"5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253"}
Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.986868 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:54Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:54 crc kubenswrapper[4711]: I1205 12:09:54.999758 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:54Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.014208 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.018352 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.018421 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.018677 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.018714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.018728 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.027791 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.039986 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.054581 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.074130 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfa
af2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.087261 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.101294 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.112344 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.122047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.122088 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.122098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.122114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.122125 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.124590 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.138996 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.152277 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.174350 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.190302 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.224699 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.224755 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.224767 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.224789 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.224800 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.266639 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg"] Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.267150 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.269479 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.269702 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.286992 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserv
er-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.307309 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.323033 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.328442 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.328487 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.328499 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.328518 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.328529 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.339601 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.355598 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.368426 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/af1537c8-0de1-476a-8001-2904bb594b76-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.368625 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af1537c8-0de1-476a-8001-2904bb594b76-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.368693 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs7vd\" (UniqueName: \"kubernetes.io/projected/af1537c8-0de1-476a-8001-2904bb594b76-kube-api-access-rs7vd\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.368855 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af1537c8-0de1-476a-8001-2904bb594b76-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.378881 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.393566 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.409509 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.423355 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.431084 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.431131 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.431148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.431166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.431179 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.435018 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.450947 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.464894 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.470338 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af1537c8-0de1-476a-8001-2904bb594b76-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.470436 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af1537c8-0de1-476a-8001-2904bb594b76-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.470495 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af1537c8-0de1-476a-8001-2904bb594b76-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.470517 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-rs7vd\" (UniqueName: \"kubernetes.io/projected/af1537c8-0de1-476a-8001-2904bb594b76-kube-api-access-rs7vd\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.471173 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af1537c8-0de1-476a-8001-2904bb594b76-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.471305 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af1537c8-0de1-476a-8001-2904bb594b76-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.477858 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af1537c8-0de1-476a-8001-2904bb594b76-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.487968 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.488925 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rs7vd\" (UniqueName: \"kubernetes.io/projected/af1537c8-0de1-476a-8001-2904bb594b76-kube-api-access-rs7vd\") pod \"ovnkube-control-plane-749d76644c-7zqqg\" (UID: \"af1537c8-0de1-476a-8001-2904bb594b76\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.504274 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\
\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.517043 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.526463 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.533481 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.533532 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.533576 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc 
kubenswrapper[4711]: I1205 12:09:55.533596 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.533607 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.602246 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" Dec 05 12:09:55 crc kubenswrapper[4711]: W1205 12:09:55.615109 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf1537c8_0de1_476a_8001_2904bb594b76.slice/crio-5ae185681f32d5e42027907fc1416bd698ce063be1ee9d450ba9a667e599fb07 WatchSource:0}: Error finding container 5ae185681f32d5e42027907fc1416bd698ce063be1ee9d450ba9a667e599fb07: Status 404 returned error can't find the container with id 5ae185681f32d5e42027907fc1416bd698ce063be1ee9d450ba9a667e599fb07 Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.646111 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.646158 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.646167 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.646183 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.646195 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.683148 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.683188 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.683217 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.683423 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.683552 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.683684 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.748946 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.748994 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.749006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.749029 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.749041 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.851917 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.851965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.851977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.851995 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.852006 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.874004 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.874128 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.874203 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:11.874178943 +0000 UTC m=+57.458501293 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.874221 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.874239 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.874277 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:11.874264745 +0000 UTC m=+57.458587075 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.874344 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.874420 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:11.874376148 +0000 UTC m=+57.458698488 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.955533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.955577 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.955585 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.955601 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.955610 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:55Z","lastTransitionTime":"2025-12-05T12:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.974897 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.974975 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.975071 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.975099 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.975113 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.975153 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.975180 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:11.975159547 +0000 UTC m=+57.559481877 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.975188 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.975204 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 12:09:55 crc kubenswrapper[4711]: E1205 12:09:55.975267 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:11.975242819 +0000 UTC m=+57.559565139 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.983467 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"}
Dec 05 12:09:55 crc kubenswrapper[4711]: I1205 12:09:55.985509 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" event={"ID":"af1537c8-0de1-476a-8001-2904bb594b76","Type":"ContainerStarted","Data":"5ae185681f32d5e42027907fc1416bd698ce063be1ee9d450ba9a667e599fb07"}
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.058889 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.058946 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.058954 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.058970 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.058980 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.161149 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.161208 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.161220 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.161242 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.161254 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.263910 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.263971 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.263983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.264003 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.264014 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.366878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.366934 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.366949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.366967 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.366978 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.469508 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.469619 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.469630 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.469649 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.469673 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.572154 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.572200 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.572212 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.572233 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.572244 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.666368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.666423 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.666431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.666446 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.666455 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.681645 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.690836 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.690876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.690886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.690899 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.690908 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.704408 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.708704 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.708739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
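Note: every "Error updating node status, will retry" entry in this window fails for the same reason, visible at the tail of the patch error above: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a TLS certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05T12:09:56Z. The retries that follow are byte-identical and are omitted here. A minimal Go sketch of the kind of check that would confirm the expiry from the node (a hypothetical diagnostic, not part of the kubelet; it assumes the webhook endpoint is reachable locally):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Skip verification so the handshake succeeds even though the
	// certificate is already expired; we only want to inspect it.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial webhook: %v", err)
	}
	defer conn.Close()

	// The leaf certificate is the first one the server presents.
	leaf := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:    %s\n", leaf.Subject)
	fmt.Printf("not before: %s\n", leaf.NotBefore.Format(time.RFC3339))
	fmt.Printf("not after:  %s\n", leaf.NotAfter.Format(time.RFC3339))
	fmt.Printf("expired:    %v\n", time.Now().After(leaf.NotAfter))
}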
event="NodeHasNoDiskPressure" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.708750 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.708764 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.708775 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.720788 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.724826 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.724858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
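Note: each "Node became not ready" condition above carries the same KubeletNotReady message: no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/, so the container runtime network stays NetworkReady=false. The node should flap back to Ready once the network provider (here OVN-Kubernetes, whose ovnkube-node pod started at 12:09:55) writes a config into that directory. A rough Go sketch of the directory check involved, assuming libcni's convention of loading .conf, .conflist, and .json files (a hypothetical helper, not kubelet code):

package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	// Directory named in the repeated KubeletNotReady messages above.
	confDir := "/etc/kubernetes/cni/net.d"

	entries, err := os.ReadDir(confDir)
	if err != nil {
		log.Fatalf("read %s: %v", confDir, err)
	}

	var configs []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// libcni considers these extensions when loading network configs.
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			configs = append(configs, e.Name())
		}
	}

	if len(configs) == 0 {
		fmt.Println("no CNI config yet; kubelet keeps reporting NetworkPluginNotReady")
		return
	}
	fmt.Println("CNI configs present:", configs)
}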
event="NodeHasNoDiskPressure" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.724869 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.724889 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.724909 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.737941 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.742503 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.742555 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.742565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.742586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.742597 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.753796 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-hv9gb"] Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.754444 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.754536 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.756275 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.756446 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.758624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.758657 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.758668 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.758683 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.758693 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.770890 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.781793 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.781846 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6rmm\" (UniqueName: \"kubernetes.io/projected/fa5afbc1-d1a4-40c8-990d-72a8169d5072-kube-api-access-s6rmm\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.783680 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.797932 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.813850 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.826372 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.839611 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.851974 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.861851 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.861893 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.861902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.861918 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.861929 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.863335 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.879800 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.882867 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.882916 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6rmm\" (UniqueName: \"kubernetes.io/projected/fa5afbc1-d1a4-40c8-990d-72a8169d5072-kube-api-access-s6rmm\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.883060 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:09:56 crc kubenswrapper[4711]: E1205 12:09:56.883191 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs podName:fa5afbc1-d1a4-40c8-990d-72a8169d5072 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:57.383166529 +0000 UTC m=+42.967488849 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs") pod "network-metrics-daemon-hv9gb" (UID: "fa5afbc1-d1a4-40c8-990d-72a8169d5072") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.894022 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.902296 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6rmm\" (UniqueName: \"kubernetes.io/projected/fa5afbc1-d1a4-40c8-990d-72a8169d5072-kube-api-access-s6rmm\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.919136 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.932332 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.943721 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.962550 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z 
is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.964618 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.964669 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.964679 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.964699 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.964711 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:56Z","lastTransitionTime":"2025-12-05T12:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.981014 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.992419 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerStarted","Data":"96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477"} Dec 05 12:09:56 crc kubenswrapper[4711]: I1205 12:09:56.997335 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:56Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.011453 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.066991 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.067030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.067039 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.067055 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.067063 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.170195 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.170232 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.170242 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.170290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.170301 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.273107 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.273156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.273177 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.273201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.273215 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.376229 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.376279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.376290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.376307 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.376319 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.389181 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:57 crc kubenswrapper[4711]: E1205 12:09:57.389428 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:09:57 crc kubenswrapper[4711]: E1205 12:09:57.389688 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs podName:fa5afbc1-d1a4-40c8-990d-72a8169d5072 nodeName:}" failed. No retries permitted until 2025-12-05 12:09:58.389665266 +0000 UTC m=+43.973987586 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs") pod "network-metrics-daemon-hv9gb" (UID: "fa5afbc1-d1a4-40c8-990d-72a8169d5072") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.478912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.478960 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.478973 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.478990 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.479002 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.582401 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.582439 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.582447 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.582462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.582473 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.682362 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.682420 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.682449 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:09:57 crc kubenswrapper[4711]: E1205 12:09:57.682552 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:09:57 crc kubenswrapper[4711]: E1205 12:09:57.682701 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:09:57 crc kubenswrapper[4711]: E1205 12:09:57.682846 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.684271 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.684309 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.684322 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.684340 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.684353 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.786504 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.786550 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.786566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.786589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.786601 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.888775 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.888815 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.888825 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.888841 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.888853 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.992298 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.992368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.992437 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.992521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.992540 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:57Z","lastTransitionTime":"2025-12-05T12:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.998495 4711 generic.go:334] "Generic (PLEG): container finished" podID="6debde47-b5cc-400a-b7fc-0419770d0bd4" containerID="96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477" exitCode=0 Dec 05 12:09:57 crc kubenswrapper[4711]: I1205 12:09:57.998612 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerDied","Data":"96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477"} Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.000380 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" event={"ID":"af1537c8-0de1-476a-8001-2904bb594b76","Type":"ContainerStarted","Data":"ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41"} Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.014872 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.026916 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.046860 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.061237 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.078287 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.091797 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.095715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.095755 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.095767 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.095785 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.095795 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:58Z","lastTransitionTime":"2025-12-05T12:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.105533 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.125175 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.139063 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.152203 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.170759 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.183569 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":
\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.193022 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.198398 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.198431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.198442 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.198459 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.198471 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:58Z","lastTransitionTime":"2025-12-05T12:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.206822 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.220296 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.232410 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.247002 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.300885 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.300936 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.300945 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.300961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.300976 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:58Z","lastTransitionTime":"2025-12-05T12:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.397269 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:58 crc kubenswrapper[4711]: E1205 12:09:58.397510 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:09:58 crc kubenswrapper[4711]: E1205 12:09:58.397615 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs podName:fa5afbc1-d1a4-40c8-990d-72a8169d5072 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:00.397585707 +0000 UTC m=+45.981908087 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs") pod "network-metrics-daemon-hv9gb" (UID: "fa5afbc1-d1a4-40c8-990d-72a8169d5072") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.404922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.404977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.404991 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.405010 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.405024 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:58Z","lastTransitionTime":"2025-12-05T12:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.507975 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.508030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.508043 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.508063 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.508092 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:58Z","lastTransitionTime":"2025-12-05T12:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.611056 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.611092 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.611100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.611114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.611123 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:58Z","lastTransitionTime":"2025-12-05T12:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:58 crc kubenswrapper[4711]: I1205 12:09:58.682907 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:09:58 crc kubenswrapper[4711]: E1205 12:09:58.683346 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.019616 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.019690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.019707 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.019736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.019754 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.045921 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.060990 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.076611 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.096899 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.110203 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.122755 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.122821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.122837 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.122860 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.122874 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.123932 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.138259 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.152819 4711 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.166086 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.185711 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.203452 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.219812 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.225264 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.225310 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.225319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.225340 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.225350 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.234296 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.251732 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha2
56:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.267105 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.282644 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.297732 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:09:59Z is after 2025-08-24T17:21:41Z"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.327175 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.327217 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.327229 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.327246 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.327260 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.430163 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.430222 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.430234 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.430254 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.430267 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.532791 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.532878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.532904 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.532942 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.532969 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.637073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.637121 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.637132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.637150 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.637163 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.682615 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.682706 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.682814 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:09:59 crc kubenswrapper[4711]: E1205 12:09:59.682958 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:09:59 crc kubenswrapper[4711]: E1205 12:09:59.683181 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:09:59 crc kubenswrapper[4711]: E1205 12:09:59.683448 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.740628 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.740686 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.740709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.740736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.740756 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.844436 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.844475 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.844487 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.844508 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.844518 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.948910 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.949083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.949167 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.949284 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:09:59 crc kubenswrapper[4711]: I1205 12:09:59.949369 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:09:59Z","lastTransitionTime":"2025-12-05T12:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.028631 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" event={"ID":"af1537c8-0de1-476a-8001-2904bb594b76","Type":"ContainerStarted","Data":"a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635"}
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.032919 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" event={"ID":"6debde47-b5cc-400a-b7fc-0419770d0bd4","Type":"ContainerStarted","Data":"a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031"}
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.037517 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8"}
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.037863 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.052491 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.052542 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.052554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.052607 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.052622 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.054168 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.068510 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.073107 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.088935 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.104783 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.120898 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.137986 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.155252 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.155301 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.155313 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.155330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.155343 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.159860 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.173843 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.189006 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.209138 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.227594 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.239050 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.252751 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.257227 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.257265 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.257275 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.257292 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.257302 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.267315 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.279163 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.291234 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.305310 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.316296 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.327554 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.347516 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastSta
te\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.360460 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.360509 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.360525 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:00 crc 
kubenswrapper[4711]: I1205 12:10:00.360590 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.360609 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.363716 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:4
3Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{
\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.374752 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.386375 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.398811 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.410903 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.422731 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.430149 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:00 crc kubenswrapper[4711]: E1205 12:10:00.430272 4711 secret.go:188] 
Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:10:00 crc kubenswrapper[4711]: E1205 12:10:00.430341 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs podName:fa5afbc1-d1a4-40c8-990d-72a8169d5072 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:04.430324797 +0000 UTC m=+50.014647127 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs") pod "network-metrics-daemon-hv9gb" (UID: "fa5afbc1-d1a4-40c8-990d-72a8169d5072") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.436050 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\
\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.451234 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.463152 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.463203 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.463213 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.463233 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.463242 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.465169 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.477597 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.487207 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.501363 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\
",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.512338 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.532413 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.565673 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.565739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.565753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.565776 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.565790 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.668330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.668375 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.668404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.668424 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.668434 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.682660 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:00 crc kubenswrapper[4711]: E1205 12:10:00.682843 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.771415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.771459 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.771471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.771490 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.771502 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.874246 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.874305 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.874316 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.874334 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.874364 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.977440 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.977489 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.977500 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.977520 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:00 crc kubenswrapper[4711]: I1205 12:10:00.977532 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:00Z","lastTransitionTime":"2025-12-05T12:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.040174 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.040728 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.068075 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.079907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.079951 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.079963 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.079984 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.079997 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.091515 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.107652 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.127323 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.146550 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.159104 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.173889 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:
55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.183084 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.183129 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.183140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.183156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.183171 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.186405 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.201646 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.215477 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.233805 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.247887 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.259747 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.276070 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.285383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.285458 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.285471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc 
kubenswrapper[4711]: I1205 12:10:01.285493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.285505 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.290160 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiser
ver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.306368 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.321605 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.336646 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.388598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.388660 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.388676 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.388697 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.388710 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.530949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.531006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.531021 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.531039 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.531052 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
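
Every failed status patch above shares one root cause: the serving certificate of the pod.network-node-identity.openshift.io webhook expired at 2025-08-24T17:21:41Z, long before the node clock's 2025-12-05T12:10:01Z, so the kubelet's TLS client rejects every Post to https://127.0.0.1:9743/pod. A minimal Go sketch of the diagnostic this suggests, reading the certificate's validity window from that endpoint (address taken from the log entries above; InsecureSkipVerify only lets the handshake complete so the dates can be read, it is not a fix):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address of the failing webhook, copied from the log entries above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Let the handshake succeed despite the expired certificate;
		// we only want to inspect its NotBefore/NotAfter dates.
		InsecureSkipVerify: true,
	})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject.String(),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}

With the clock months past notAfter, as here, verification fails closed and the kubelet keeps retrying, which is why the same error repeats for every pod status patch above.
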
Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.634238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.634286 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.634295 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.634314 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.634327 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.682969 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.683035 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.683006 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:01 crc kubenswrapper[4711]: E1205 12:10:01.683202 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:01 crc kubenswrapper[4711]: E1205 12:10:01.683356 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:01 crc kubenswrapper[4711]: E1205 12:10:01.683512 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
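
The patch payloads in these entries are doubly escaped: klog quotes the whole error string, and the JSON patch inside it is quoted once more, which produces the \\\" runs above. A small Go reading aid that peels both layers and pretty-prints the patch (the sample string is abbreviated from the multus-wgkqk entry above; the offsets are found by scanning for the quoted JSON, which assumes a single patch per message):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"strconv"
	"strings"
)

func main() {
	// Abbreviated err="..." value, byte-for-byte as the escaping appears in the log.
	raw := `"failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"}}\" for pod"`

	// Layer 1: klog's quoting of the whole error string.
	outer, err := strconv.Unquote(raw)
	if err != nil {
		log.Fatal(err)
	}

	// Layer 2: the patch itself is a quoted string inside that error.
	i, j := strings.Index(outer, `"{`), strings.LastIndex(outer, `}"`)
	if i < 0 || j < 0 {
		log.Fatal("no quoted patch found in message")
	}
	patch, err := strconv.Unquote(outer[i : j+2])
	if err != nil {
		log.Fatal(err)
	}

	var buf bytes.Buffer
	if err := json.Indent(&buf, []byte(patch), "", "  "); err != nil {
		log.Fatal(err)
	}
	fmt.Println(buf.String())
}

Run against a full entry, this turns the $setElementOrder/conditions patches above back into ordinary readable JSON.
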
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.736733 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.737015 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.737027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.737045 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.737056 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.839859 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.839893 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.839904 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.839925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.839938 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.942895 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.942979 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.942998 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.943039 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:01 crc kubenswrapper[4711]: I1205 12:10:01.943056 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:01Z","lastTransitionTime":"2025-12-05T12:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.042908 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.044890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.044920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.044932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.044945 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.044955 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.147685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.147736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.147745 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.147761 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.147771 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.251051 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.251108 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.251120 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.251140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.251152 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.353484 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.353545 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.353557 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.353578 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.353595 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.458105 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.458155 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.458166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.458185 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.458196 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.560384 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.560544 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.560558 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.560579 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.560592 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.682148 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:02 crc kubenswrapper[4711]: E1205 12:10:02.682299 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
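
The NodeNotReady churn in these entries has a single trigger: the container runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI configuration yet, while the ovnkube-controller container that would provide it is still ready:false earlier in the log. A simplified sketch of the existence check the message implies, assuming the conventional .conf/.conflist/.json names; the real runtime also parses and validates the files rather than just globbing:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory quoted in the NetworkPluginNotReady message above.
	dir := "/etc/kubernetes/cni/net.d"

	var configs []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pat))
		if err != nil {
			continue // Glob only errors on a malformed pattern
		}
		configs = append(configs, matches...)
	}

	if len(configs) == 0 {
		fmt.Println("no CNI configuration file found: network plugin not ready")
		os.Exit(1)
	}
	fmt.Println("CNI configurations present:", configs)
}

Once the network plugin writes its configuration into that directory, the check passes, the Ready condition flips back to True, and this block of repeated events stops.
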
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.682861 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.682900 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.682909 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.682924 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.682936 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.794680 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.794730 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.794741 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.794759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.794770 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.897040 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.897081 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.897091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.897108 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:02 crc kubenswrapper[4711]: I1205 12:10:02.897120 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:02Z","lastTransitionTime":"2025-12-05T12:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:02.999938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:02.999991 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.000000 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.000016 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.000025 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.046427 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.103475 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.103549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.103568 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.103615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.103633 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.206372 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.206443 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.206454 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.206472 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.206484 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.309264 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.309326 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.309335 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.309356 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.309372 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.413688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.413798 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.413819 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.413846 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.413862 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.517120 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.517175 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.517186 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.517207 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.517219 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.620367 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.620443 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.620453 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.620471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.620482 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.682758 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.682826 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.682882 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:03 crc kubenswrapper[4711]: E1205 12:10:03.682927 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:03 crc kubenswrapper[4711]: E1205 12:10:03.683019 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:03 crc kubenswrapper[4711]: E1205 12:10:03.683072 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.723675 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.723749 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.723767 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.723797 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.723829 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.827195 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.827597 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.827608 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.827643 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.827656 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.930288 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.930336 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.930344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.930360 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:03 crc kubenswrapper[4711]: I1205 12:10:03.930369 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:03Z","lastTransitionTime":"2025-12-05T12:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.033276 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.033357 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.033405 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.033424 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.033435 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.051743 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/0.log" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.054167 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8" exitCode=1 Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.054214 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8"} Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.054928 4711 scope.go:117] "RemoveContainer" containerID="a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.078647 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23
d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.096963 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.112155 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.124904 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.134711 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.135966 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.136030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.136042 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.136082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.136094 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.154898 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.181923 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is 
after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.205656 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.227732 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.238876 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.238928 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.238938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.238955 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.238982 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.248863 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4c27639cc560d4b132dfbee3a5e1ed586106c67
8f4fa1c3df0fa98cdadd92e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:03Z\\\",\\\"message\\\":\\\"ng reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:03.530828 6042 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:03.530935 6042 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:03.531324 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:03.531422 6042 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:10:03.531433 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:10:03.531451 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:10:03.531463 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:10:03.531588 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:10:03.531810 6042 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:10:03.531864 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:10:03.531898 6042 factory.go:656] Stopping watch factory\\\\nI1205 12:10:03.531908 6042 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:10:03.531919 6042 ovnkube.go:599] Stopped ovnkube\\\\nI1205 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.268968 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.283960 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.296917 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.313245 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.331067 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.341397 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.341448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.341464 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.341484 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.341498 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.346516 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.365777 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:04Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.444359 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.444432 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.444444 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.444467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.444479 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.513365 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb"
Dec 05 12:10:04 crc kubenswrapper[4711]: E1205 12:10:04.513545 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 12:10:04 crc kubenswrapper[4711]: E1205 12:10:04.513620 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs podName:fa5afbc1-d1a4-40c8-990d-72a8169d5072 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:12.513600368 +0000 UTC m=+58.097922698 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs") pod "network-metrics-daemon-hv9gb" (UID: "fa5afbc1-d1a4-40c8-990d-72a8169d5072") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.547504 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.547540 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.547549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.547564 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.547575 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.650969 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.651021 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.651035 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.651054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.651069 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.682501 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb"
Dec 05 12:10:04 crc kubenswrapper[4711]: E1205 12:10:04.687538 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.754489 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.754553 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.754583 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.754598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.754608 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.858102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.858160 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.858172 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.858190 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.858202 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.961678 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.961729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.961741 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.961759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:04 crc kubenswrapper[4711]: I1205 12:10:04.961772 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:04Z","lastTransitionTime":"2025-12-05T12:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.060449 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/0.log"
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.064025 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.064083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.064113 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.064131 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.064160 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.064692 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71"}
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.064863 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.090531 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7
641700bdb47f202c571c8b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:03Z\\\",\\\"message\\\":\\\"ng reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:03.530828 6042 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:03.530935 6042 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:03.531324 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:03.531422 6042 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:10:03.531433 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:10:03.531451 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:10:03.531463 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:10:03.531588 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:10:03.531810 6042 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:10:03.531864 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:10:03.531898 6042 factory.go:656] Stopping watch factory\\\\nI1205 12:10:03.531908 6042 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:10:03.531919 6042 ovnkube.go:599] Stopped ovnkube\\\\nI1205 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.111529 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.127662 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.141769 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.155871 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.166747 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.166798 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.166813 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.166832 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.166844 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.167613 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.182049 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\
\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.196878 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.209466 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.223378 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.237161 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.250629 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" 
Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.263992 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.269634 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.269890 4711 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.269975 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.270066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.270196 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.287645 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\
\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.306173 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.324657 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.344306 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:05Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.373245 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.373525 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.373822 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.374030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.374318 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.477353 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.477624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.477687 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.477748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.477833 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.581268 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.581310 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.581323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.581342 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.581355 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.682149 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.682250 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.682373 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:05 crc kubenswrapper[4711]: E1205 12:10:05.682638 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:05 crc kubenswrapper[4711]: E1205 12:10:05.682737 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:05 crc kubenswrapper[4711]: E1205 12:10:05.683168 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.684330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.684376 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.684430 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.684450 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.684461 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.787238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.787284 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.787297 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.787318 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.787331 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.890292 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.890350 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.890362 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.890378 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.890403 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.993501 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.993908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.994014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.994091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:05 crc kubenswrapper[4711]: I1205 12:10:05.994159 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:05Z","lastTransitionTime":"2025-12-05T12:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.070902 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/1.log" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.071856 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/0.log" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.075777 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71" exitCode=1 Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.075828 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.075882 4711 scope.go:117] "RemoveContainer" containerID="a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.077873 4711 scope.go:117] "RemoveContainer" containerID="7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71" Dec 05 12:10:06 crc kubenswrapper[4711]: E1205 12:10:06.078625 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.098748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.098790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.098801 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.098823 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.098835 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.106252 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:03Z\\\",\\\"message\\\":\\\"ng reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:03.530828 6042 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:03.530935 6042 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:03.531324 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:03.531422 6042 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:10:03.531433 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:10:03.531451 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:10:03.531463 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:10:03.531588 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:10:03.531810 6042 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:10:03.531864 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:10:03.531898 6042 factory.go:656] Stopping watch factory\\\\nI1205 12:10:03.531908 6042 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:10:03.531919 6042 ovnkube.go:599] Stopped ovnkube\\\\nI1205 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin 
networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.126623 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\"
:true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.141217 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" 
Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.156532 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.173670 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.187322 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.201870 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.201903 4711 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.201912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.201928 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.201939 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.205652 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.222045 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.240255 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.257821 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.270655 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.288193 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.300120 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.305256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.305302 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.305311 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.305328 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.305339 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.320771 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/opensh
ift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb
68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.334853 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.348380 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.361849 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.408691 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.408739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.408751 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.408773 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
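The status_manager failures above (12:10:06.348380 and 12:10:06.361849) both fail TLS verification against the network-node-identity webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z. A minimal Go sketch, not part of the log and assuming the webhook endpoint is reachable from where it runs, that dials the listener and prints the certificate validity window the kubelet is rejecting:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// 127.0.0.1:9743 is the webhook address from the "failed calling webhook"
	// errors above; InsecureSkipVerify lets us inspect the expired certificate
	// instead of failing the handshake the way the kubelet's client does.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()
	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificates presented")
		return
	}
	cert := certs[0]
	fmt.Printf("subject=%s\nnotBefore=%s\nnotAfter=%s\nexpired=%v\n",
		cert.Subject,
		cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339),
		time.Now().After(cert.NotAfter))
}

Run against this node at the logged time, the output would end in expired=true, matching the "current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" detail repeated in every webhook failure here.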
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.511018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.511075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.511086 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.511106 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.511124 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.614260 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.614345 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.614357 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.614373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.614417 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.683149 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:06 crc kubenswrapper[4711]: E1205 12:10:06.683370 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.717581 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.717654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.717688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.717709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.717722 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.779212 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.779272 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.779293 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.779314 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.779326 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: E1205 12:10:06.791987 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.796221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.796258 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.796270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.796285 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.796300 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: E1205 12:10:06.813060 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.818785 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.818841 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.818855 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.818873 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.818888 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: E1205 12:10:06.832098 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.836264 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.836305 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.836316 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.836336 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.836350 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: E1205 12:10:06.849043 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.857810 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.857864 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.857882 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.857905 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.857921 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: E1205 12:10:06.873582 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:06Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:06 crc kubenswrapper[4711]: E1205 12:10:06.873784 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.875961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.876017 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.876033 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.876057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.876071 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.978943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.979002 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.979014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.979035 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:06 crc kubenswrapper[4711]: I1205 12:10:06.979048 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:06Z","lastTransitionTime":"2025-12-05T12:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.081441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.081493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.081508 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.081527 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.081541 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.081811 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/1.log" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.184409 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.184449 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.184458 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.184474 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.184485 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.286487 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.286569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.286582 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.286600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.286612 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.389790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.389846 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.389856 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.389875 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.389887 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.492814 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.492854 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.492862 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.492876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.492886 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.596256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.596323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.596335 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.596415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.596432 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.682429 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.682497 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.682508 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:07 crc kubenswrapper[4711]: E1205 12:10:07.682603 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:07 crc kubenswrapper[4711]: E1205 12:10:07.682804 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:07 crc kubenswrapper[4711]: E1205 12:10:07.682890 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.699298 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.699350 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.699368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.699407 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.699420 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.802649 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.802707 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.802721 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.802739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.802753 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.906114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.906207 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.906232 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.906265 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:07 crc kubenswrapper[4711]: I1205 12:10:07.906290 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:07Z","lastTransitionTime":"2025-12-05T12:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.010220 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.010303 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.010319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.010347 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.010369 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.112640 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.112705 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.112716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.112730 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.112755 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.215663 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.215716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.215725 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.215742 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.215753 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.319020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.319063 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.319072 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.319089 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.319098 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.422175 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.422235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.422249 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.422273 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.422293 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.525546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.525608 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.525621 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.525641 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.525654 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.628169 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.628224 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.628235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.628253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.628271 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.682716 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:08 crc kubenswrapper[4711]: E1205 12:10:08.682880 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.697335 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\
":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.709560 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.724963 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.730971 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.731020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.731031 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.731055 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.731080 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.739104 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.753935 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.773453 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.794548 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfa
af2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.809634 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.825765 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.833618 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.833664 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.833675 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.833692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.833705 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.843112 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.857729 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.873739 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:
55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.889425 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.904432 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.923485 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.936612 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.936665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.936674 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.936690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.936701 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:08Z","lastTransitionTime":"2025-12-05T12:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.946768 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7
641700bdb47f202c571c8b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a4c27639cc560d4b132dfbee3a5e1ed586106c678f4fa1c3df0fa98cdadd92e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:03Z\\\",\\\"message\\\":\\\"ng reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:03.530828 6042 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:03.530935 6042 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:03.531324 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:03.531422 6042 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:10:03.531433 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:10:03.531451 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:10:03.531463 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:10:03.531588 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:10:03.531810 6042 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:10:03.531864 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:10:03.531898 6042 factory.go:656] Stopping watch factory\\\\nI1205 12:10:03.531908 6042 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:10:03.531919 6042 ovnkube.go:599] Stopped ovnkube\\\\nI1205 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 
; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:08 crc kubenswrapper[4711]: I1205 12:10:08.961871 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:08Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.039557 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.039604 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc 
kubenswrapper[4711]: I1205 12:10:09.039618 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.039639 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.039653 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.142752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.142823 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.142835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.142855 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.142895 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.246167 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.246218 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.246228 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.246249 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.246259 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.349340 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.349412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.349427 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.349443 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.349455 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.452306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.452369 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.452411 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.452433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.452448 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.555923 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.555978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.555991 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.556011 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.556020 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.658989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.659046 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.659057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.659087 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.659099 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.682850 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.682921 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.682958 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:09 crc kubenswrapper[4711]: E1205 12:10:09.683072 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:09 crc kubenswrapper[4711]: E1205 12:10:09.683324 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:09 crc kubenswrapper[4711]: E1205 12:10:09.683476 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.761761 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.761820 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.761836 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.761852 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.761861 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.865553 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.865629 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.865732 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.865759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.865769 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.968340 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.968419 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.968435 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.968457 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:09 crc kubenswrapper[4711]: I1205 12:10:09.968468 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:09Z","lastTransitionTime":"2025-12-05T12:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.071250 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.071299 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.071311 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.071330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.071340 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.174320 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.174365 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.174375 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.174412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.174426 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.276505 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.276550 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.276561 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.276580 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.276591 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.378929 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.379033 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.379045 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.379062 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.379091 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.481799 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.481851 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.481860 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.481877 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.481888 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.584694 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.584729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.584737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.584752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.584762 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.682429 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:10 crc kubenswrapper[4711]: E1205 12:10:10.682609 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.688474 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.688530 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.688545 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.688566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.688578 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.791685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.791725 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.791734 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.791752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.791771 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.894316 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.894668 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.894798 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.894917 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.895062 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.998751 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.998835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.998851 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.998879 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:10 crc kubenswrapper[4711]: I1205 12:10:10.998895 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:10Z","lastTransitionTime":"2025-12-05T12:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.102533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.102583 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.102596 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.102621 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.102634 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.205288 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.205334 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.205343 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.205361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.205373 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.308809 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.309191 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.309202 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.309223 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.309233 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.412179 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.412497 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.412585 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.412652 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.412713 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.515058 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.515102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.515118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.515137 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.515154 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.574811 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.575795 4711 scope.go:117] "RemoveContainer" containerID="7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71" Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.575960 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.588702 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.609565 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7
641700bdb47f202c571c8b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.617148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.617183 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.617198 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.617216 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.617231 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.625337 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.637991 4711 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.651308 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.662499 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.675203 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.682962 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.682966 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.683082 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.683134 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.683307 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.683535 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.688423 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.702237 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.714201 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.719777 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.719806 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.719814 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.719830 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.719842 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.730905 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.741841 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.756651 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 
12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.773773 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.794569 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.810749 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.823907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.823963 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.823973 4711 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.824008 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.824020 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.826886 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.897642 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.897816 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.897908 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.898056 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.898130 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:43.898109176 +0000 UTC m=+89.482431546 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.898473 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:43.898455314 +0000 UTC m=+89.482777674 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.898563 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.898614 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:43.898605009 +0000 UTC m=+89.482927339 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.927027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.927069 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.927082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.927100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.927115 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:11Z","lastTransitionTime":"2025-12-05T12:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.998701 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:11 crc kubenswrapper[4711]: I1205 12:10:11.998816 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.998971 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.998996 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.999015 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.999080 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-05 12:10:43.999061959 +0000 UTC m=+89.583384289 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.999187 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.999229 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.999247 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:10:11 crc kubenswrapper[4711]: E1205 12:10:11.999527 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:43.999323065 +0000 UTC m=+89.583645565 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.030164 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.030201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.030209 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.030223 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.030234 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.132293 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.132348 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.132358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.132373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.132400 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.235612 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.235665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.235674 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.235692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.235703 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.338539 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.338625 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.338636 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.338684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.338694 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.441682 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.441728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.441739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.441760 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.441771 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.544558 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.544615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.544627 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.544647 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.544660 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.605723 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:12 crc kubenswrapper[4711]: E1205 12:10:12.605924 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:10:12 crc kubenswrapper[4711]: E1205 12:10:12.606049 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs podName:fa5afbc1-d1a4-40c8-990d-72a8169d5072 nodeName:}" failed. No retries permitted until 2025-12-05 12:10:28.606025057 +0000 UTC m=+74.190347387 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs") pod "network-metrics-daemon-hv9gb" (UID: "fa5afbc1-d1a4-40c8-990d-72a8169d5072") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.648015 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.648068 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.648082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.648105 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.648122 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.682322 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:12 crc kubenswrapper[4711]: E1205 12:10:12.682629 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.751440 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.751683 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.751694 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.751715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.751728 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.854346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.854408 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.854422 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.854441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.854452 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.956354 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.956414 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.956432 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.956451 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:12 crc kubenswrapper[4711]: I1205 12:10:12.956462 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:12Z","lastTransitionTime":"2025-12-05T12:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.059672 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.059720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.059734 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.059753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.059766 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.162964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.163022 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.163037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.163059 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.163073 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.266047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.266567 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.266662 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.266749 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.266842 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.369665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.370008 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.370114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.370214 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.370311 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.473451 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.473493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.473502 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.473516 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.473525 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.576466 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.576517 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.576534 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.576554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.576568 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.679296 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.679633 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.679714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.679799 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.679884 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.682602 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.682623 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.682602 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:13 crc kubenswrapper[4711]: E1205 12:10:13.682713 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:13 crc kubenswrapper[4711]: E1205 12:10:13.682772 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:13 crc kubenswrapper[4711]: E1205 12:10:13.682827 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.783511 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.783929 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.784024 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.784104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.784180 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.889751 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.889802 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.889816 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.889836 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.889852 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.992413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.992469 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.992486 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.992507 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:13 crc kubenswrapper[4711]: I1205 12:10:13.992521 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:13Z","lastTransitionTime":"2025-12-05T12:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.095820 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.096097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.096168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.096281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.096360 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.199734 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.199790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.199804 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.199837 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.199853 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.302413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.302462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.302471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.302489 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.302500 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.405692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.405996 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.406098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.406275 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.406374 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.509773 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.509813 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.509823 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.509838 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.509848 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.612908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.612952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.612961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.612977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.612988 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.682574 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:14 crc kubenswrapper[4711]: E1205 12:10:14.682794 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.715459 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.716052 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.716068 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.716089 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.716102 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.819521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.820265 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.820469 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.820859 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.821005 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.924117 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.924167 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.924178 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.924198 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:14 crc kubenswrapper[4711]: I1205 12:10:14.924209 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:14Z","lastTransitionTime":"2025-12-05T12:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.026690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.026982 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.027075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.027154 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.027427 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.131232 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.131300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.131314 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.131334 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.131351 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.234084 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.234144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.234156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.234173 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.234183 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.337091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.337134 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.337144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.337160 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.337171 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.439468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.439509 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.439520 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.439551 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.439561 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.456025 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.464717 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.478118 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.493480 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.506276 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.520064 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.541035 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfa
af2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.542759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.542887 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.542979 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.543072 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.543159 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.557546 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.574067 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.588500 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.598244 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.609685 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:
55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.622520 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.637190 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.645901 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.645947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.645957 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.645976 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.645989 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.651482 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.672195 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin 
networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.682499 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.682530 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.682499 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:15 crc kubenswrapper[4711]: E1205 12:10:15.682668 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:15 crc kubenswrapper[4711]: E1205 12:10:15.682778 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:15 crc kubenswrapper[4711]: E1205 12:10:15.682893 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.691171 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed
81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"star
tTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.706600 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/
etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.717381 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T12:10:15Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.748925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.748980 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.748993 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.749014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.749032 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.851652 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.851689 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.851698 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.851712 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.851724 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.955193 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.955274 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.955286 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.955304 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:15 crc kubenswrapper[4711]: I1205 12:10:15.955313 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:15Z","lastTransitionTime":"2025-12-05T12:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.058690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.058771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.058784 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.058806 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.058818 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.162668 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.162731 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.162744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.162765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.162778 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.265433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.265476 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.265488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.265506 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.265517 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.367913 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.367969 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.367978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.367997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.368009 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.471367 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.471467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.471476 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.471493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.471503 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.574329 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.574375 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.574451 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.574476 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.574491 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.676948 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.677570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.677659 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.677795 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.677878 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.682414 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb"
Dec 05 12:10:16 crc kubenswrapper[4711]: E1205 12:10:16.682580 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.781941 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.781995 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.782006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.782027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.782040 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.884348 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.884414 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.884429 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.884450 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.884466 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.986938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.986999 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.987010 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.987028 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:16 crc kubenswrapper[4711]: I1205 12:10:16.987039 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:16Z","lastTransitionTime":"2025-12-05T12:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.089299 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.089333 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.089341 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.089377 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.089411 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.098955 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.099005 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.099014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.099030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.099040 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.111846 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:17Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.116026 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.116088 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.116103 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.116124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.116136 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.130859 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:17Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.135909 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.135955 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.135966 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.135986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.135997 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.150140 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:17Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.154456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.154508 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.154527 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.154564 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.154577 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.169606 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:17Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.173778 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.173818 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.173858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.173878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.173890 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.188039 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:17Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.188600 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.192663 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.192713 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.192728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.192749 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.192762 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.296165 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.296219 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.296236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.296263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.296277 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.402021 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.402073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.402084 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.402103 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.402121 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.504890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.505226 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.505346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.505467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.505573 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.608164 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.608201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.608209 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.608224 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.608236 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.683139 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.683302 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.683329 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.683328 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.683417 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:17 crc kubenswrapper[4711]: E1205 12:10:17.683556 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.711101 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.711131 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.711146 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.711161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.711169 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.813691 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.813768 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.813776 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.813791 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.813801 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.916881 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.916913 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.916921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.916937 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:17 crc kubenswrapper[4711]: I1205 12:10:17.916947 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:17Z","lastTransitionTime":"2025-12-05T12:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.019905 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.019960 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.019976 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.019996 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.020008 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.123027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.123074 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.123084 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.123102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.123113 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.225966 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.226049 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.226065 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.226091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.226116 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.329226 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.329287 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.329300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.329319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.329330 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.433067 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.433124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.433141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.433165 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.433177 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.535871 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.535927 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.535940 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.535960 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.535973 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.638812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.638863 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.638873 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.638892 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.638904 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.682873 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:18 crc kubenswrapper[4711]: E1205 12:10:18.683066 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.708711 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed
4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.722341 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.738673 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.741221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.741594 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.741702 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.741807 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.741887 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.752774 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.765070 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.778494 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:
55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.793220 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.807980 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"res
ource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.820938 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.833474 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.845624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.845726 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.845739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.845772 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.845788 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.854625 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7
641700bdb47f202c571c8b71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.869889 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.882939 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\
\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.894086 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.911932 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.927113 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.942569 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.948245 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.948264 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.948273 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.948287 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.948297 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:18Z","lastTransitionTime":"2025-12-05T12:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:18 crc kubenswrapper[4711]: I1205 12:10:18.962517 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:18Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.051877 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 
12:10:19.051918 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.051928 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.051986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.051998 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.154951 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.154994 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.155004 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.155018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.155027 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.259338 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.259404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.259413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.259430 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.259442 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.362407 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.362468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.362483 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.362501 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.362515 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.465659 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.465697 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.465708 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.465724 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.465733 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.568574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.568878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.569083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.569270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.569445 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.671841 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.671891 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.671904 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.671924 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.671936 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.682310 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.682521 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.682620 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:19 crc kubenswrapper[4711]: E1205 12:10:19.682674 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:19 crc kubenswrapper[4711]: E1205 12:10:19.682782 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:19 crc kubenswrapper[4711]: E1205 12:10:19.682868 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.775125 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.775186 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.775195 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.775216 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.775225 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.878002 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.878318 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.878329 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.878349 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.878360 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.980679 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.980721 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.980733 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.980753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:19 crc kubenswrapper[4711]: I1205 12:10:19.980767 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:19Z","lastTransitionTime":"2025-12-05T12:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.082980 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.083011 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.083018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.083049 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.083060 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.186423 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.186469 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.186513 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.186535 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.186545 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.289197 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.289252 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.289266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.289286 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.289297 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.392456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.392507 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.392526 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.392546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.392558 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.495588 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.495638 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.495648 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.495667 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.495701 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.598436 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.598486 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.598497 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.598517 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.598528 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.682840 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:20 crc kubenswrapper[4711]: E1205 12:10:20.683045 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.702055 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.702115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.702127 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.702153 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.702164 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.804584 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.804616 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.804624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.804640 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.804650 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.907921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.907985 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.907999 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.908018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:20 crc kubenswrapper[4711]: I1205 12:10:20.908032 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:20Z","lastTransitionTime":"2025-12-05T12:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.011013 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.011051 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.011065 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.011083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.011094 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.115371 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.115525 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.115562 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.115595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.115618 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.218609 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.218662 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.218673 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.218694 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.218706 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.322165 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.322215 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.322225 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.322243 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.322257 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.425057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.425653 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.425920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.426141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.426320 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.529080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.529132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.529144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.529166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.529179 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.646546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.646598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.646625 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.646644 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.646655 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.683014 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.683082 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.683142 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:21 crc kubenswrapper[4711]: E1205 12:10:21.683767 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:21 crc kubenswrapper[4711]: E1205 12:10:21.684140 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:21 crc kubenswrapper[4711]: E1205 12:10:21.684281 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.684315 4711 scope.go:117] "RemoveContainer" containerID="7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.749999 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.750048 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.750060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.750081 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.750102 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.852555 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.852611 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.852627 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.852647 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.852662 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.956057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.956104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.956118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.956144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:21 crc kubenswrapper[4711]: I1205 12:10:21.956157 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:21Z","lastTransitionTime":"2025-12-05T12:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.068235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.068296 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.068309 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.068330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.068345 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.139117 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/1.log" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.156672 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"} Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.157651 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.170071 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a57
7da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.172639 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.172684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.172692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.172709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.172720 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.183379 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.197942 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.221772 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin 
networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.243993 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.275483 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.275536 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:22 crc 
kubenswrapper[4711]: I1205 12:10:22.275549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.275568 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.275579 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.277378 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.314636 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.349916 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f894
5c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.375943 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.377929 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.377958 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.377969 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.377985 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.377996 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.391892 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.407794 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.420902 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.444829 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.459046 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.474637 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.480831 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.480874 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.480886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.480912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.480927 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.489480 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.505035 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.526878 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:22Z is after 2025-08-24T17:21:41Z" Dec 05 
12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.583326 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.583418 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.583433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.583455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.583468 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.683097 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb"
Dec 05 12:10:22 crc kubenswrapper[4711]: E1205 12:10:22.683270 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.685799 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.685821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.685829 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.685843 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.685852 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.822664 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.822735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.822753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.822774 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.822787 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.925913 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.925962 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.925972 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.925988 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:22 crc kubenswrapper[4711]: I1205 12:10:22.925998 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:22Z","lastTransitionTime":"2025-12-05T12:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.028897 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.028944 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.028957 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.028977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.028992 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.132275 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.132321 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.132333 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.132354 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.132367 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.235778 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.235857 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.235888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.235906 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.235916 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.339215 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.339263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.339272 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.339290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.339300 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.442101 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.442159 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.442172 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.442192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.442205 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.544977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.545017 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.545027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.545043 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.545054 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.648288 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.648337 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.648346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.648362 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.648371 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.682602 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.682724 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:10:23 crc kubenswrapper[4711]: E1205 12:10:23.682765 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.682771 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:10:23 crc kubenswrapper[4711]: E1205 12:10:23.682887 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:10:23 crc kubenswrapper[4711]: E1205 12:10:23.682985 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.751545 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.751619 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.751636 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.751656 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.751670 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.854363 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.854418 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.854429 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.854446 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.854459 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.957098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.957161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.957173 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.957195 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:23 crc kubenswrapper[4711]: I1205 12:10:23.957209 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:23Z","lastTransitionTime":"2025-12-05T12:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.060336 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.060637 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.060717 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.060785 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.060866 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.163671 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.163719 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.163733 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.163785 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.163801 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.165950 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/2.log"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.166504 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/1.log"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.168810 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af" exitCode=1
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.168865 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"}
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.168904 4711 scope.go:117] "RemoveContainer" containerID="7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.169624 4711 scope.go:117] "RemoveContainer" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"
Dec 05 12:10:24 crc kubenswrapper[4711]: E1205 12:10:24.169801 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a"
Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.185816 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.196983 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.210763 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.225017 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.238777 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.254782 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.267192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.267243 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.267276 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.267300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.267318 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.279772 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.294127 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.308361 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.324267 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.341023 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.357290 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 
12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.369591 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.371572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.371608 4711 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.371617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.371635 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.371646 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.383231 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.395290 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.407238 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.433657 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin 
networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:23Z\\\",\\\"message\\\":\\\"12:10:22.930294 6479 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:22.930472 6479 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930503 6479 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930934 6479 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931341 6479 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931795 6479 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932057 6479 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932508 6479 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:22.932555 6479 factory.go:656] Stopping watch factory\\\\nI1205 12:10:22.932577 6479 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.450128 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:24Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.474367 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.474433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.474442 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.474461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.474471 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.577216 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.577260 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.577270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.577286 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.577298 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.680814 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.680853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.680862 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.680878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.680892 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.683266 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:24 crc kubenswrapper[4711]: E1205 12:10:24.683423 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.783318 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.783368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.783404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.783424 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.783436 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.885924 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.885977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.885987 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.886005 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.886015 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.988434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.988474 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.988489 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.988509 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:24 crc kubenswrapper[4711]: I1205 12:10:24.988523 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:24Z","lastTransitionTime":"2025-12-05T12:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.091671 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.091725 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.091739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.091756 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.091767 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.174806 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/2.log" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.194500 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.194556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.194570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.194589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.194602 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.297799 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.297848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.297858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.297877 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.297888 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.401286 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.401337 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.401346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.401364 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.401375 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.504190 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.504241 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.504253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.504279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.504294 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.607465 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.607519 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.607533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.607552 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.607567 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.682491 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.682546 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.682679 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:25 crc kubenswrapper[4711]: E1205 12:10:25.682836 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:25 crc kubenswrapper[4711]: E1205 12:10:25.682987 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:25 crc kubenswrapper[4711]: E1205 12:10:25.683112 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.711223 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.711273 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.711282 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.711300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.711312 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.814100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.814154 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.814164 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.814183 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.814194 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.917162 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.917205 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.917217 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.917236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:25 crc kubenswrapper[4711]: I1205 12:10:25.917246 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:25Z","lastTransitionTime":"2025-12-05T12:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.019992 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.020042 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.020055 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.020074 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.020085 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.123668 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.123734 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.123747 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.123777 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.123790 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.226655 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.226714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.226724 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.226742 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.226752 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.330265 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.330338 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.330351 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.330422 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.330439 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.433178 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.433243 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.433255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.433277 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.433289 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.536015 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.536064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.536076 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.536093 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.536107 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.639054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.639099 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.639107 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.639123 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.639133 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.682552 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:26 crc kubenswrapper[4711]: E1205 12:10:26.682737 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.742808 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.742869 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.742884 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.742906 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.742922 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.847353 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.847491 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.847563 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.847603 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.847631 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.950606 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.950648 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.950657 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.950681 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:26 crc kubenswrapper[4711]: I1205 12:10:26.950699 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:26Z","lastTransitionTime":"2025-12-05T12:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.053009 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.053270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.053289 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.053311 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.053323 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.155707 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.155746 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.155755 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.155769 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.155780 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.227743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.227835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.227858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.227891 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.227921 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.245862 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:27Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.250505 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.250555 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.250564 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.250583 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.250594 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.264137 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:27Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.268351 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.268413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.268422 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.268441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.268451 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.281827 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:27Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.286288 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.286356 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.286376 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.286441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.286461 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.299920 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:27Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.304744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.304815 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.304833 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.304855 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.304874 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.318901 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:27Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.319076 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.320961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.321004 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.321018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.321043 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.321070 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.424073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.424132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.424144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.424167 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.424180 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.526882 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.526928 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.526940 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.526957 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.526970 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.629556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.629589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.629602 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.629619 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.629630 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.683003 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.683109 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.683160 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.683004 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.683270 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:27 crc kubenswrapper[4711]: E1205 12:10:27.683492 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.732855 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.732928 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.732943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.732965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.732982 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.835774 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.835833 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.835847 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.835867 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.835881 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.938669 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.938722 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.938734 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.938755 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:27 crc kubenswrapper[4711]: I1205 12:10:27.938769 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:27Z","lastTransitionTime":"2025-12-05T12:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.041079 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.041130 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.041148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.041169 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.041180 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.143610 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.143665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.143674 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.143692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.143704 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.246783 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.246846 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.246858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.246875 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.246888 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.350020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.350090 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.350103 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.350125 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.350142 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.453380 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.453444 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.453455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.453472 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.453484 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.555850 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.555921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.555935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.555956 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.555973 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.658493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.658547 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.658565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.658586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.658601 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.680197 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:28 crc kubenswrapper[4711]: E1205 12:10:28.680338 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:10:28 crc kubenswrapper[4711]: E1205 12:10:28.680420 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs podName:fa5afbc1-d1a4-40c8-990d-72a8169d5072 nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.680400886 +0000 UTC m=+106.264723216 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs") pod "network-metrics-daemon-hv9gb" (UID: "fa5afbc1-d1a4-40c8-990d-72a8169d5072") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.682112 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:28 crc kubenswrapper[4711]: E1205 12:10:28.682235 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.695469 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.712309 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.726482 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.740495 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.761967 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.762003 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.762014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.762047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.762058 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.763414 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.778411 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.792516 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.806588 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.818136 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.830615 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 
12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.840794 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.851815 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.862665 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 
12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.864544 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.864589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.864598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.864612 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.864621 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.875023 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63
a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.895307 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b
5ecbcc5fefa926b2cf9d97af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:23Z\\\",\\\"message\\\":\\\"12:10:22.930294 6479 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:22.930472 6479 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930503 6479 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930934 6479 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931341 6479 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931795 6479 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932057 6479 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932508 6479 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:22.932555 6479 factory.go:656] Stopping watch factory\\\\nI1205 12:10:22.932577 6479 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.911267 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.925374 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.936034 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:28Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.966825 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.966878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.966887 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:28 crc 
kubenswrapper[4711]: I1205 12:10:28.966903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:28 crc kubenswrapper[4711]: I1205 12:10:28.966913 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:28Z","lastTransitionTime":"2025-12-05T12:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.069890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.069944 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.069957 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.069976 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.069989 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.173173 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.173253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.173264 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.173286 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.173300 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.274964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.275005 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.275017 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.275034 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.275046 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.378443 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.378486 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.378498 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.378514 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.378528 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.481378 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.481444 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.481456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.481477 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.481492 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.584071 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.584110 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.584118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.584133 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.584142 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.682451 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.682489 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.682507 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:29 crc kubenswrapper[4711]: E1205 12:10:29.682622 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:29 crc kubenswrapper[4711]: E1205 12:10:29.682732 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:29 crc kubenswrapper[4711]: E1205 12:10:29.682823 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.686726 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.686776 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.686789 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.686806 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.686819 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.789311 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.789369 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.789383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.789432 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.789449 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.892241 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.892294 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.892308 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.892325 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.892337 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.995033 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.995094 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.995107 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.995131 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:29 crc kubenswrapper[4711]: I1205 12:10:29.995146 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:29Z","lastTransitionTime":"2025-12-05T12:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.098809 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.098879 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.098897 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.098923 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.098944 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.201633 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.201681 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.201692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.201711 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.201723 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.304560 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.304610 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.304626 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.304647 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.304662 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.408112 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.408150 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.408169 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.408186 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.408197 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.510938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.510994 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.511006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.511023 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.511035 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.613679 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.613723 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.613734 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.613754 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.613764 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.682449 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:30 crc kubenswrapper[4711]: E1205 12:10:30.682638 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.716714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.716771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.716783 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.716858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.716873 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.819462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.819505 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.819514 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.819530 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.819540 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.921746 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.921790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.921800 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.921817 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:30 crc kubenswrapper[4711]: I1205 12:10:30.921829 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:30Z","lastTransitionTime":"2025-12-05T12:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.024715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.024784 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.024794 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.024816 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.024829 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.127923 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.127975 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.127986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.128004 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.128018 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.231505 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.231567 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.231579 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.231602 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.231615 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.334060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.334134 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.334148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.334168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.334182 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.437285 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.437327 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.437338 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.437415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.437432 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.540535 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.540584 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.540595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.540614 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.540627 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.643685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.643754 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.643768 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.643790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.643809 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.683017 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:31 crc kubenswrapper[4711]: E1205 12:10:31.683168 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.683031 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.683288 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:31 crc kubenswrapper[4711]: E1205 12:10:31.683523 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:31 crc kubenswrapper[4711]: E1205 12:10:31.683626 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.746848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.746896 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.746908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.746925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.746939 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.850050 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.850118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.850131 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.850152 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.850166 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.954623 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.954697 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.954709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.954729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:31 crc kubenswrapper[4711]: I1205 12:10:31.954742 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:31Z","lastTransitionTime":"2025-12-05T12:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.057685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.057726 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.057735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.057756 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.057766 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.160631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.160732 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.160744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.160767 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.160779 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.203460 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgkqk_0df94722-138f-4247-b308-3e3ccadc54b5/kube-multus/0.log" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.203516 4711 generic.go:334] "Generic (PLEG): container finished" podID="0df94722-138f-4247-b308-3e3ccadc54b5" containerID="76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019" exitCode=1 Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.203548 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgkqk" event={"ID":"0df94722-138f-4247-b308-3e3ccadc54b5","Type":"ContainerDied","Data":"76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.203903 4711 scope.go:117] "RemoveContainer" containerID="76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.226738 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.245063 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.259973 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.264564 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.264619 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.264630 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.264648 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.264660 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.285172 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b
5ecbcc5fefa926b2cf9d97af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:23Z\\\",\\\"message\\\":\\\"12:10:22.930294 6479 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:22.930472 6479 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930503 6479 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930934 6479 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931341 6479 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931795 6479 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932057 6479 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932508 6479 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:22.932555 6479 factory.go:656] Stopping watch factory\\\\nI1205 12:10:22.932577 6479 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.303041 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.319987 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:31Z\\\",\\\"message\\\":\\\"2025-12-05T12:09:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d\\\\n2025-12-05T12:09:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d to /host/opt/cni/bin/\\\\n2025-12-05T12:09:46Z [verbose] multus-daemon started\\\\n2025-12-05T12:09:46Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:10:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.333990 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.350902 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.366535 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.368250 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.368298 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.368309 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.368333 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.368347 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.382556 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.398611 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.411750 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.431633 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.445444 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.457658 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.471952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.472023 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.472044 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.472070 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.472087 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.472735 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.485062 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.500197 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:32Z is after 2025-08-24T17:21:41Z" Dec 05 
12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.575836 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.575884 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.575894 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.575911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.575922 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.678121 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.678169 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.678178 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.678197 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.678208 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.682544 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:32 crc kubenswrapper[4711]: E1205 12:10:32.682686 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.781556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.781597 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.781614 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.781634 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.781654 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.883837 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.883883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.883894 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.883913 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.883926 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.986644 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.986688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.986698 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.986713 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:32 crc kubenswrapper[4711]: I1205 12:10:32.986726 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:32Z","lastTransitionTime":"2025-12-05T12:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.090276 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.090361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.090417 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.090452 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.090474 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:33Z","lastTransitionTime":"2025-12-05T12:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.194009 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.194064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.194079 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.194100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.194114 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:33Z","lastTransitionTime":"2025-12-05T12:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.209341 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgkqk_0df94722-138f-4247-b308-3e3ccadc54b5/kube-multus/0.log" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.209426 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgkqk" event={"ID":"0df94722-138f-4247-b308-3e3ccadc54b5","Type":"ContainerStarted","Data":"4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0"} Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.231006 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.249709 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.266273 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.283197 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.297514 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.297591 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.297608 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.297655 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.297668 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:33Z","lastTransitionTime":"2025-12-05T12:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.302088 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.316491 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.328591 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.342094 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\
",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.351990 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.371577 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.388932 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.401786 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.404913 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.404954 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.404965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.404985 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.404997 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:33Z","lastTransitionTime":"2025-12-05T12:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.413211 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.429878 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin 
networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:23Z\\\",\\\"message\\\":\\\"12:10:22.930294 6479 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:22.930472 6479 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930503 6479 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930934 6479 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931341 6479 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931795 6479 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932057 6479 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932508 6479 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:22.932555 6479 factory.go:656] Stopping watch factory\\\\nI1205 12:10:22.932577 6479 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.442518 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.452686 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.465217 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:31Z\\\",\\\"message\\\":\\\"2025-12-05T12:09:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d\\\\n2025-12-05T12:09:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d to /host/opt/cni/bin/\\\\n2025-12-05T12:09:46Z [verbose] multus-daemon started\\\\n2025-12-05T12:09:46Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:10:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.474448 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.507072 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.507123 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.507133 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.507149 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.507160 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:33Z","lastTransitionTime":"2025-12-05T12:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.683050 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.683050 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:10:33 crc kubenswrapper[4711]: I1205 12:10:33.683050 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:10:33 crc kubenswrapper[4711]: E1205 12:10:33.683437 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:10:33 crc kubenswrapper[4711]: E1205 12:10:33.683527 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:10:33 crc kubenswrapper[4711]: E1205 12:10:33.683217 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:10:34 crc kubenswrapper[4711]: I1205 12:10:34.683178 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb"
Dec 05 12:10:34 crc kubenswrapper[4711]: E1205 12:10:34.683434 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072"
Dec 05 12:10:35 crc kubenswrapper[4711]: I1205 12:10:35.683135 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:10:35 crc kubenswrapper[4711]: I1205 12:10:35.683183 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:10:35 crc kubenswrapper[4711]: I1205 12:10:35.683204 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:10:35 crc kubenswrapper[4711]: E1205 12:10:35.683322 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:10:35 crc kubenswrapper[4711]: E1205 12:10:35.683522 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:10:35 crc kubenswrapper[4711]: E1205 12:10:35.683651 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.602821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.602870 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.602879 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.602894 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.602903 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:36Z","lastTransitionTime":"2025-12-05T12:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.682963 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:36 crc kubenswrapper[4711]: E1205 12:10:36.683132 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.704843 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.704878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.704886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.704899 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.704912 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:36Z","lastTransitionTime":"2025-12-05T12:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.807675 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.807724 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.807736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.807755 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.807769 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:36Z","lastTransitionTime":"2025-12-05T12:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.910336 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.910399 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.910409 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.910423 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:36 crc kubenswrapper[4711]: I1205 12:10:36.910433 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:36Z","lastTransitionTime":"2025-12-05T12:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.013698 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.013752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.013761 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.013783 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.013794 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.117836 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.117899 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.117915 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.117940 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.117959 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.223276 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.223329 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.223342 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.223360 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.223371 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.326982 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.327048 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.327062 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.327080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.327093 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.429770 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.429820 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.429853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.429871 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.429883 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.532526 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.532575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.532586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.532607 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.532617 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.635066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.635112 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.635124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.635177 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.635193 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.683125 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.683209 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.683140 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.683320 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.683516 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.683620 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.701825 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.701903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.701914 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.701931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.701942 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.715696 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:37Z is after 
2025-08-24T17:21:41Z" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.720593 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.720654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.720667 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.720693 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.720708 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.735912 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:37Z is after 
2025-08-24T17:21:41Z" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.741835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.741902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.741927 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.741954 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.741969 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.756870 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:37Z is after 
2025-08-24T17:21:41Z" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.761743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.761805 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.761817 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.761835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.761847 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.775167 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:37Z is after 
2025-08-24T17:21:41Z" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.779312 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.779366 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.779379 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.779413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.779425 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.792582 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:37Z is after 
2025-08-24T17:21:41Z" Dec 05 12:10:37 crc kubenswrapper[4711]: E1205 12:10:37.792736 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.795569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.795633 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.795656 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.795684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.795704 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.898230 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.898290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.898305 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.898323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:37 crc kubenswrapper[4711]: I1205 12:10:37.898335 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:37Z","lastTransitionTime":"2025-12-05T12:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.001690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.001729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.001739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.001754 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.001765 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.104585 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.104625 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.104636 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.104653 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.104665 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.207460 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.207512 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.207556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.207618 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.207643 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.311136 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.311203 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.311221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.311240 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.311253 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.414186 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.414246 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.414257 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.414277 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.414291 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.517883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.517937 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.517947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.517964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.517975 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.622777 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.622847 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.622860 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.622886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.622900 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.682779 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:38 crc kubenswrapper[4711]: E1205 12:10:38.682993 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.683856 4711 scope.go:117] "RemoveContainer" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af" Dec 05 12:10:38 crc kubenswrapper[4711]: E1205 12:10:38.684030 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.698108 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:31Z\\\",\\\"message\\\":\\\"2025-12-05T12:09:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d\\\\n2025-12-05T12:09:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d to /host/opt/cni/bin/\\\\n2025-12-05T12:09:46Z [verbose] multus-daemon started\\\\n2025-12-05T12:09:46Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:10:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z"
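
The multus termination message preserved in that patch shows the other side of the stall: multus polls for the default network's readiness indicator file and exits with the "pollimmediate error: timed out waiting for the condition" failure when ovn-kubernetes never writes 10-ovn-kubernetes.conf. A condensed sketch of such a poll using the classic apimachinery wait helper follows (the 1s interval and 45s timeout are illustrative choices, not multus's actual configuration):

    // readiness.go - sketch of the check multus describes: poll until the
    // default network's readiness indicator file exists, or time out with
    // the same "timed out waiting for the condition" error seen above.
    package main

    import (
        "fmt"
        "os"
        "time"

        "k8s.io/apimachinery/pkg/util/wait"
    )

    func main() {
        const indicator = "/host/run/multus/cni/net.d/10-ovn-kubernetes.conf"
        // Interval/timeout are assumptions for the sketch.
        err := wait.PollImmediate(1*time.Second, 45*time.Second, func() (bool, error) {
            _, statErr := os.Stat(indicator)
            if statErr == nil {
                return true, nil // file exists: default network is ready
            }
            if os.IsNotExist(statErr) {
                return false, nil // keep polling
            }
            return false, statErr // unexpected error aborts the poll
        })
        if err != nil {
            fmt.Println("have you checked that your default network is ready?", err)
            os.Exit(1)
        }
        fmt.Println("readiness indicator present:", indicator)
    }
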
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.720516 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z"
Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.725216 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.725255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.725263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.725279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.725292 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
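
Each of these rejected updates is a strategic-merge patch, and the condition={...} text logged by setters.go is the JSON the kubelet is trying to write. A self-contained sketch that reproduces that shape with local types (the struct below mirrors only the fields visible in the log, not the full k8s.io/api definitions):

    // condition.go - local sketch, not the k8s.io types: marshaling this
    // struct reproduces the condition={...} payload shape seen in the
    // "Node became not ready" lines above.
    package main

    import (
        "encoding/json"
        "fmt"
    )

    // NodeCondition mirrors just the fields visible in the log entries.
    type NodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        c := NodeCondition{
            Type:               "Ready",
            Status:             "False",
            LastHeartbeatTime:  "2025-12-05T12:10:38Z",
            LastTransitionTime: "2025-12-05T12:10:38Z",
            Reason:             "KubeletNotReady",
            Message:            "container runtime network not ready: NetworkReady=false ...",
        }
        b, _ := json.Marshal(c)
        fmt.Println("condition=" + string(b))
    }
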
Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.732664 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.745925 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.756652 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.771568 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.784185 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.794481 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.809249 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:
55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.819728 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.827781 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.827830 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.827846 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.827864 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.827877 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.839253 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49
117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.852537 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.864944 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.876494 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.893424 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9
ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7914211dc4c53c0fce2abf881df6000e38217ed7641700bdb47f202c571c8b71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:05Z\\\",\\\"message\\\":\\\"efault: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.244\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 12:10:04.855972 6211 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855996 6211 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI1205 12:10:04.855967 6211 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1205 12:10:04.855825 6211 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin 
networ\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:23Z\\\",\\\"message\\\":\\\"12:10:22.930294 6479 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:22.930472 6479 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930503 6479 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930934 6479 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931341 6479 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931795 6479 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932057 6479 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932508 6479 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:22.932555 6479 factory.go:656] Stopping watch factory\\\\nI1205 12:10:22.932577 6479 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.907589 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.917910 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.932074 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.932114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.932126 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.932144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.932160 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:38Z","lastTransitionTime":"2025-12-05T12:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.940680 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:23Z\\\",\\\"message\\\":\\\"12:10:22.930294 6479 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:22.930472 6479 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930503 6479 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930934 6479 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931341 6479 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931795 6479 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932057 6479 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932508 6479 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:22.932555 6479 factory.go:656] Stopping watch factory\\\\nI1205 12:10:22.932577 6479 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.954736 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.967909 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.979349 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 
12:10:38 crc kubenswrapper[4711]: I1205 12:10:38.991894 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-05T12:10:38Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.005593 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:31Z\\\",\\\"message\\\":\\\"2025-12-05T12:09:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d\\\\n2025-12-05T12:09:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d to /host/opt/cni/bin/\\\\n2025-12-05T12:09:46Z [verbose] multus-daemon started\\\\n2025-12-05T12:09:46Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:10:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.016636 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.028973 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.034346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.034378 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.034431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.034448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.034458 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.041153 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.053181 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.065353 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.074955 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.087606 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" 
Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.098320 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.116847 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.130000 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.137331 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.137406 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.137421 4711 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.137439 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.137452 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.143359 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.155999 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:39Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.240023 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.240063 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.240073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.240094 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.240109 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.342639 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.342684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.342693 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.342709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.342718 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.449637 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.449697 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.449714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.449736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.449755 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.552437 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.552537 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.552554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.552892 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.552935 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.655549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.655586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.655595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.655613 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.655624 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.682584 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.682669 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.682693 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:39 crc kubenswrapper[4711]: E1205 12:10:39.682736 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:39 crc kubenswrapper[4711]: E1205 12:10:39.682838 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:39 crc kubenswrapper[4711]: E1205 12:10:39.683041 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.757653 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.757697 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.757706 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.757723 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.757732 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.861311 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.861412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.861425 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.861447 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.861460 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.964876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.964925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.964935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.964952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:39 crc kubenswrapper[4711]: I1205 12:10:39.964964 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:39Z","lastTransitionTime":"2025-12-05T12:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.068076 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.068128 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.068140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.068157 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.068191 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.171492 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.171551 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.171565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.171587 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.171603 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.274358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.274417 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.274427 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.274441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.274453 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.377206 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.377296 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.377309 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.377326 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.377358 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.480882 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.480932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.480945 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.480963 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.480975 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.583484 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.583541 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.583552 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.583572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.583586 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.682911 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:40 crc kubenswrapper[4711]: E1205 12:10:40.683134 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.687011 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.687052 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.687066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.687084 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.687095 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.790680 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.790726 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.790735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.790753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.790763 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.894036 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.894099 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.894113 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.894132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.894144 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.997588 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.997646 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.997655 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.997674 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:40 crc kubenswrapper[4711]: I1205 12:10:40.997692 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:40Z","lastTransitionTime":"2025-12-05T12:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.101001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.101061 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.101075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.101098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.101116 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.204009 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.204061 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.204071 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.204089 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.204102 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.307492 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.307554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.307570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.307593 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.307609 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.410862 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.410909 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.410919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.410935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.410946 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.514900 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.514955 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.514966 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.514983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.514994 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.618115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.618234 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.618262 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.618295 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.618319 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.683166 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:41 crc kubenswrapper[4711]: E1205 12:10:41.683380 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.683185 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.683166 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:41 crc kubenswrapper[4711]: E1205 12:10:41.683524 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:41 crc kubenswrapper[4711]: E1205 12:10:41.683663 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.722498 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.722573 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.722596 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.722617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.722629 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.824986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.825022 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.825031 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.825046 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.825057 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.927236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.927288 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.927300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.927317 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:41 crc kubenswrapper[4711]: I1205 12:10:41.927328 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:41Z","lastTransitionTime":"2025-12-05T12:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.029873 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.030267 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.030357 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.030454 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.030539 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.133591 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.133646 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.133659 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.133678 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.133691 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.235927 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.235972 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.235983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.236003 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.236016 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.339019 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.339081 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.339092 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.339112 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.339125 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.440926 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.440968 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.440979 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.440995 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.441006 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.543994 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.544059 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.544072 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.544092 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.544106 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.647153 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.647202 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.647213 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.647236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.647252 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.682804 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:42 crc kubenswrapper[4711]: E1205 12:10:42.682966 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.750355 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.750431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.750443 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.750461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.750475 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.855024 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.855109 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.855126 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.855168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.855183 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.958651 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.958743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.958759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.958784 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:42 crc kubenswrapper[4711]: I1205 12:10:42.958801 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:42Z","lastTransitionTime":"2025-12-05T12:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.061442 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.061499 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.061511 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.061531 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.061545 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.164786 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.164863 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.164877 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.164897 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.164913 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.268097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.268161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.268170 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.268194 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.268206 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.370810 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.370874 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.370886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.370908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.370922 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.474382 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.474446 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.474455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.474505 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.474519 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.577159 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.577201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.577214 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.577233 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.577246 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.680567 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.680628 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.680643 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.680663 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.680675 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.683148 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.683209 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.683264 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:43 crc kubenswrapper[4711]: E1205 12:10:43.683666 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:43 crc kubenswrapper[4711]: E1205 12:10:43.683793 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:43 crc kubenswrapper[4711]: E1205 12:10:43.683977 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.697332 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.783644 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.783701 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.783715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.783734 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.783748 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.887736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.887819 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.887828 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.887850 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.887862 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.953796 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.953953 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.954023 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:43 crc kubenswrapper[4711]: E1205 12:10:43.954124 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:10:43 crc kubenswrapper[4711]: E1205 12:10:43.954142 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:47.954101421 +0000 UTC m=+153.538423761 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:43 crc kubenswrapper[4711]: E1205 12:10:43.954200 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:11:47.954180303 +0000 UTC m=+153.538502813 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:10:43 crc kubenswrapper[4711]: E1205 12:10:43.954248 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:10:43 crc kubenswrapper[4711]: E1205 12:10:43.954413 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:11:47.954353637 +0000 UTC m=+153.538675967 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.990861 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.990932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.990947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.990987 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:43 crc kubenswrapper[4711]: I1205 12:10:43.991000 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:43Z","lastTransitionTime":"2025-12-05T12:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.055197 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.055326 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.055532 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.055616 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.055570 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.055689 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.055706 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.055631 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.055956 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:11:48.055922603 +0000 UTC m=+153.640244933 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.056030 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:11:48.056007995 +0000 UTC m=+153.640330325 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.093545 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.093622 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.093636 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.093661 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.093703 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.196503 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.196556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.196564 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.196582 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.196592 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.299555 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.299637 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.299652 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.299671 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.299683 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.402302 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.402365 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.402380 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.402435 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.402453 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.506225 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.506270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.506281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.506306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.506322 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.608849 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.608888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.608897 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.608932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.608943 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.682702 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:44 crc kubenswrapper[4711]: E1205 12:10:44.682834 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.711508 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.711575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.711598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.711629 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.711652 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.814447 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.814494 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.814503 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.814520 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.814530 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.917825 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.917876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.917887 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.917904 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:44 crc kubenswrapper[4711]: I1205 12:10:44.917915 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:44Z","lastTransitionTime":"2025-12-05T12:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.020055 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.020109 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.020121 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.020142 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.020155 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.124229 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.124288 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.124299 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.124316 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.124328 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.227101 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.227163 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.227183 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.227246 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.227267 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.330841 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.330938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.330959 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.330981 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.330997 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.434226 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.434270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.434281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.434300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.434312 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.536686 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.536728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.536739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.536756 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.536768 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.639888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.639942 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.639955 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.639975 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.639987 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.682639 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.682680 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.682738 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:45 crc kubenswrapper[4711]: E1205 12:10:45.682788 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:45 crc kubenswrapper[4711]: E1205 12:10:45.682904 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:45 crc kubenswrapper[4711]: E1205 12:10:45.683099 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.742961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.743027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.743037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.743054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.743065 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.846440 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.846494 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.846506 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.846523 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.846534 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.950212 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.950265 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.950279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.950301 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:45 crc kubenswrapper[4711]: I1205 12:10:45.950315 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:45Z","lastTransitionTime":"2025-12-05T12:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.053178 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.053221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.053230 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.053247 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.053258 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.156256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.156306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.156319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.156338 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.156352 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.258526 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.258597 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.258610 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.258631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.258651 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.361853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.361911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.361921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.361945 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.361959 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.465029 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.465087 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.465100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.465122 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.465137 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.568438 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.568496 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.568508 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.568528 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.568540 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.671094 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.671132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.671144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.671159 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.671171 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.682748 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:46 crc kubenswrapper[4711]: E1205 12:10:46.682877 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.773790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.773858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.773875 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.773902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.773920 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.876528 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.876585 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.876598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.876617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.876630 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.980190 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.980235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.980244 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.980259 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:46 crc kubenswrapper[4711]: I1205 12:10:46.980269 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:46Z","lastTransitionTime":"2025-12-05T12:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.083856 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.083964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.083992 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.084027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.084052 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.187420 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.187475 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.187498 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.187521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.187536 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.290723 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.290793 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.290806 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.290827 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.290841 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.394675 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.394728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.394753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.394781 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.394796 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.496893 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.496934 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.496945 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.496964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.496984 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.599526 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.599572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.599585 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.599602 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.599612 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.682541 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.682629 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.682658 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:47 crc kubenswrapper[4711]: E1205 12:10:47.682831 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:47 crc kubenswrapper[4711]: E1205 12:10:47.682907 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:47 crc kubenswrapper[4711]: E1205 12:10:47.683012 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.702368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.702449 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.702469 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.702493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.702513 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.805415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.805461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.805475 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.805492 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.805504 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.908846 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.908919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.908931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.908949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:47 crc kubenswrapper[4711]: I1205 12:10:47.908963 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:47Z","lastTransitionTime":"2025-12-05T12:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.012236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.013479 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.013566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.013595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.013610 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.036886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.036948 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.036958 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.036976 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.036986 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: E1205 12:10:48.050419 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.055510 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.055546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.055557 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.055572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.055584 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: E1205 12:10:48.066942 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.070918 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.070965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.070997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.071017 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.071030 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: E1205 12:10:48.087344 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.092464 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.092515 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.092526 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.092543 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.092573 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: E1205 12:10:48.105907 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.110168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.110203 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.110237 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.110276 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.110295 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: E1205 12:10:48.123374 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"728a4e24-f371-4cb9-97e1-b9890e024e7d\\\",\\\"systemUUID\\\":\\\"5acb70a7-4bdd-4a98-adeb-a7abc79182a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: E1205 12:10:48.123527 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.125641 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.125697 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.125712 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.125741 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.125760 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.228769 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.228811 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.228821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.228853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.228862 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.331519 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.331562 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.331573 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.331591 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.331607 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.434067 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.434105 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.434114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.434129 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.434140 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.536986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.537373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.537507 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.537587 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.537669 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.642230 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.642718 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.642799 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.642918 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.642999 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.683077 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:48 crc kubenswrapper[4711]: E1205 12:10:48.689661 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.706825 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgkqk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0df94722-138f-4247-b308-3e3ccadc54b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:31Z\\\",\\\"message\\\":\\\"2025-12-05T12:09:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d\\\\n2025-12-05T12:09:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3e607cbf-af59-4c6e-90c6-a430b728600d to /host/opt/cni/bin/\\\\n2025-12-05T12:09:46Z [verbose] multus-daemon started\\\\n2025-12-05T12:09:46Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:10:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpc4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgkqk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.719831 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-k95n8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fd49ea4-f954-4aed-969e-2f913b5172b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76b72eb675436d3478454123e0e3cd1898ac5f85e2239065b8e2928bf8f1dc36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjcl4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-k95n8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.739967 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac6514cc-e4c8-4918-9010-e819d7bc7b32\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:09:39Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 12:09:33.702035 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:09:33.706351 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3449439963/tls.crt::/tmp/serving-cert-3449439963/tls.key\\\\\\\"\\\\nI1205 12:09:39.737335 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:09:39.746843 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:09:39.746888 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:09:39.746923 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:09:39.746941 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:09:39.759319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:09:39.759593 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759660 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:09:39.759692 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:09:39.759716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:09:39.759770 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:09:39.759794 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:09:39.759376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:09:39.764294 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.745890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.745938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.745947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.745963 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.745975 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.755252 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.767887 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.781041 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c3b459ccaea66aa4f06fa7c22ae254d61664fb883e32c5ac37576d41fbf48c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90a4d21e224be89a5c5fe8b60c71b213b322e17287ac8a476c15f3fa9338dbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.796283 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r45dj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cb3908e1-8749-46d7-a003-a9f7a8574715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95eb057e591a64e621b1f8d126bc1f1fb6c1370265c0df634a3afb429737546d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzl57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r45dj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.806893 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af1537c8-0de1-476a-8001-2904bb594b76\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff3aa93c346a4d885e5b2fcffb67c5d2365583c23ef5cf9904458511bee24e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444855522e58954e2b8d4ac7e2d1202cbb0dd1e6bd01991ac411e6b93eef635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs7vd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7zqqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" 
Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.818090 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5afbc1-d1a4-40c8-990d-72a8169d5072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6rmm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hv9gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.839212 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2717761-3feb-41a1-9fed-3721960fa653\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85dd1725c4a6dcc193dad2564fe53f4d176e6d841bf022ee727b1343123ca9be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9555c853cfce84995aff3d75ab4f86cd36c8e687f5ceb7632282115264722575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faeef18724ddce50a92cda5ce286d920232241089aed0be6eb7386eeee276687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22d9f023fd3cb30ad9b1a680da53879bdd5f0
deb28b21d55e2359c8196bfe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a74dc794607f64008f81d702d76bce2aa99703f81daafcba661794cce3ec6787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453cd23d4ddc438606c9188d36d198281d9e0c4559730f19a6cefe02696ec038\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceacfaaf2c010f6d7c9c919f8bb70a00b85d3ed4c1be6671b1ee752ba1b1f686\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09b315d2111f1ad8577bf73713247becdf0ffd93c22ae9bdb51abdaa344899a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.848401 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.848461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.848474 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.848493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.848507 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.855302 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f76cb9f0-2266-4e9b-9d8d-d1e2a6633e8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://392bd200f06871a71d53a3e1e5c59aa44a005acd10bdbb22d198f154bdfce61e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e04f3c1e7fbf5d85c221cfc55fabf6d3d4e860ba1b17255761b3d0caf1763bd6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://260897742bf4e557da609c471d96552bf9c0baba96e88b4ed9362da9ad5f02e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.868333 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d090da8ea7c9b0a2fd59364d0215374cc63928f6e7683ad7ee7eb84de9c3ad07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.879295 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.897771 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b
5ecbcc5fefa926b2cf9d97af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:10:23Z\\\",\\\"message\\\":\\\"12:10:22.930294 6479 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:10:22.930472 6479 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930503 6479 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 12:10:22.930934 6479 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931341 6479 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.931795 6479 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932057 6479 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 12:10:22.932508 6479 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:10:22.932555 6479 factory.go:656] Stopping watch factory\\\\nI1205 12:10:22.932577 6479 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:10:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ndz5q_openshift-ovn-kubernetes(3bce8b78-05d7-4003-9231-24d2e07f0c2a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fj2xq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ndz5q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.915376 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6debde47-b5cc-400a-b7fc-0419770d0bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2e342b5d6928dd86bb41ea4460baafafeb3d1a711493c30b75af3f2d932f031\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d9249b0008add98336d3c9495cf9df6abc9cad28b72ab23f2a51d57d6c9b9e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb394ccecf07849193811cd1d09be46f5d657fab7aa2712acc049d1ccfdfec37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://530edc57fce9308cbc3716a7c62fb70d43a334d3a029ce4e63e8f28dbdcffe41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T12:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://addc3ff2957b9d0f817af260624aea52b3e35b27fcc4019b9e3cfdaa20110513\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b0785fd8270cdbdfe7a240c811ab7395a8800b32a3936f65ae7c8a8e8dca253\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e13cee275369b11adc170c5f436aa4d5bcaff232668a24cd7ddeb9977c8477\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f5pxw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6jvvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.930478 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6415393-a29e-4ad7-ae77-846fd77802bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7baf11a8cb36d32397e598bf52c94ff8718477a122ffb89b316b9eae2825b1ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3a663be5e2f0bea8ec1a0a593ece380683f187263f85f80d996e914aea4e48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageI
D\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3a663be5e2f0bea8ec1a0a593ece380683f187263f85f80d996e914aea4e48\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.946846 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba5f7718-dcfb-40bf-85a6-d002cfd34ba5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:10:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29156c0fe687abcd248dbb1062eb72e37fb27a54f050b810278ffc1a35ec2d73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8bd7c0d16741f92723c3bfcd1aa9ddbaed0f8a02f9b18853d0dfaaf2fa89978\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9209eaad9528d575026a12838413f9de2a577da96fbadadce20d1d660a6ac7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f3ba6c91b33fc2daf2f5cf103c03f9efb6ec6db26fcafc645081491377a9749\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:09:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:09:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.951384 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.951456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.951467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.951485 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.951496 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:48Z","lastTransitionTime":"2025-12-05T12:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.962054 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed617edc261e22b3f950a8cc29524ce537a4ef5990a871725d2e11b5ac170b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:48 crc kubenswrapper[4711]: I1205 12:10:48.974213 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47bc5fb6-f724-409c-9a04-3c5e50951dd0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c201f66ea658ed4ce4106d9a3bc4b6d1a0fd7ac7aaff5422f39dae918d9057e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:09:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gjvmx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:09:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-drklt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.055046 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.055123 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.055144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.055168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.055187 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.158303 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.158368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.158414 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.158437 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.158452 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.261506 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.261560 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.261569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.261586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.261633 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.363922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.364327 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.364442 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.364549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.364656 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.467796 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.467847 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.467858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.467878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.467895 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.570834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.570920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.570975 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.571054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.571086 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.674650 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.674720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.674737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.674761 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.674783 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.682250 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.682328 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.682415 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:49 crc kubenswrapper[4711]: E1205 12:10:49.682511 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:49 crc kubenswrapper[4711]: E1205 12:10:49.682811 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:49 crc kubenswrapper[4711]: E1205 12:10:49.682907 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.779017 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.779109 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.779123 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.779259 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.779283 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.882850 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.882912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.882925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.882944 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.882957 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.986364 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.986437 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.986446 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.986463 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:49 crc kubenswrapper[4711]: I1205 12:10:49.986476 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:49Z","lastTransitionTime":"2025-12-05T12:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.089997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.090054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.090065 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.090082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.090093 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.193176 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.193268 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.193284 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.193304 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.193319 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.296488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.296537 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.296550 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.296567 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.296579 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.399552 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.399595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.399607 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.399625 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.399638 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.503071 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.503115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.503126 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.503144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.503158 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.606248 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.606312 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.606334 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.606351 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.606365 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.682494 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:50 crc kubenswrapper[4711]: E1205 12:10:50.682668 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.709361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.709472 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.709499 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.709530 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.709553 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.812235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.812300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.812317 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.812339 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.812356 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.914684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.914753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.914773 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.914803 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:50 crc kubenswrapper[4711]: I1205 12:10:50.914822 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:50Z","lastTransitionTime":"2025-12-05T12:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.019078 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.019134 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.019149 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.019169 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.019187 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.121839 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.121895 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.121908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.121926 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.121939 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.225478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.225546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.225563 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.225589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.225609 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.328379 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.328506 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.328532 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.328561 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.328582 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.431931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.431982 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.431994 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.432014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.432026 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.534892 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.534968 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.534985 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.535012 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.535030 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.638729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.638804 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.638825 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.638851 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.638866 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.682725 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.682824 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.683215 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:10:51 crc kubenswrapper[4711]: E1205 12:10:51.683491 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:51 crc kubenswrapper[4711]: E1205 12:10:51.683581 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:51 crc kubenswrapper[4711]: E1205 12:10:51.683724 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.683934 4711 scope.go:117] "RemoveContainer" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.742906 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.742952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.742965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.742989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.743003 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.846482 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.846533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.846548 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.846570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.846584 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.949830 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.949890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.949909 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.949932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:51 crc kubenswrapper[4711]: I1205 12:10:51.949944 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:51Z","lastTransitionTime":"2025-12-05T12:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.052594 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.052651 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.052663 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.052680 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.052692 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.156191 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.156243 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.156256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.156277 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.156291 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.259819 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.259890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.259905 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.259927 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.259941 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.286851 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/2.log" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.289574 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerStarted","Data":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.290240 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.362824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.363080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.363102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.363130 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.363151 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.465203 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.465256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.465269 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.465300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.465317 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.488564 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=71.488538129 podStartE2EDuration="1m11.488538129s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.469227125 +0000 UTC m=+98.053549475" watchObservedRunningTime="2025-12-05 12:10:52.488538129 +0000 UTC m=+98.072860469" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.488825 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=68.488819876 podStartE2EDuration="1m8.488819876s" podCreationTimestamp="2025-12-05 12:09:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.488789646 +0000 UTC m=+98.073111976" watchObservedRunningTime="2025-12-05 12:10:52.488819876 +0000 UTC m=+98.073142206" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.546118 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-r45dj" podStartSLOduration=71.546090224 podStartE2EDuration="1m11.546090224s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.545916089 +0000 UTC m=+98.130238419" watchObservedRunningTime="2025-12-05 12:10:52.546090224 +0000 UTC m=+98.130412554" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.568814 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.568876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.568887 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.568903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.568915 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.580718 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7zqqg" podStartSLOduration=70.580688463 podStartE2EDuration="1m10.580688463s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.564502426 +0000 UTC m=+98.148824756" watchObservedRunningTime="2025-12-05 12:10:52.580688463 +0000 UTC m=+98.165010793" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.581268 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=9.581261478 podStartE2EDuration="9.581261478s" podCreationTimestamp="2025-12-05 12:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.580331815 +0000 UTC m=+98.164654145" watchObservedRunningTime="2025-12-05 12:10:52.581261478 +0000 UTC m=+98.165583818" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.620601 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=37.620576894 podStartE2EDuration="37.620576894s" podCreationTimestamp="2025-12-05 12:10:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.60374451 +0000 UTC m=+98.188066850" watchObservedRunningTime="2025-12-05 12:10:52.620576894 +0000 UTC m=+98.204899224" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.638986 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podStartSLOduration=71.638963446 podStartE2EDuration="1m11.638963446s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.637693944 +0000 UTC m=+98.222016284" watchObservedRunningTime="2025-12-05 12:10:52.638963446 +0000 UTC m=+98.223285776" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.639116 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-hv9gb"] Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.639217 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:52 crc kubenswrapper[4711]: E1205 12:10:52.639308 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.666847 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podStartSLOduration=70.66682297 podStartE2EDuration="1m10.66682297s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.666751369 +0000 UTC m=+98.251073709" watchObservedRunningTime="2025-12-05 12:10:52.66682297 +0000 UTC m=+98.251145300" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.671490 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.671530 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.671542 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.671561 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.671572 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.697309 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-6jvvs" podStartSLOduration=71.697278409 podStartE2EDuration="1m11.697278409s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.697182796 +0000 UTC m=+98.281505126" watchObservedRunningTime="2025-12-05 12:10:52.697278409 +0000 UTC m=+98.281600749"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.717612 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-wgkqk" podStartSLOduration=71.717591418 podStartE2EDuration="1m11.717591418s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.717230479 +0000 UTC m=+98.301552819" watchObservedRunningTime="2025-12-05 12:10:52.717591418 +0000 UTC m=+98.301913748"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.729347 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-k95n8" podStartSLOduration=71.729322396 podStartE2EDuration="1m11.729322396s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.72823159 +0000 UTC m=+98.312553920" watchObservedRunningTime="2025-12-05 12:10:52.729322396 +0000 UTC m=+98.313644726"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.745996 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=73.745973515 podStartE2EDuration="1m13.745973515s" podCreationTimestamp="2025-12-05 12:09:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:10:52.745456332 +0000 UTC m=+98.329778682" watchObservedRunningTime="2025-12-05 12:10:52.745973515 +0000 UTC m=+98.330295845"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.790749 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.790800 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.790813 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.790833 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.790848 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
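In each "Observed pod startup duration" record above, podStartSLOduration is simply the wall-clock gap between podCreationTimestamp and watchObservedRunningTime: for kube-apiserver-crc, 12:10:52.745973515 minus 12:09:39 is 73.745973515 s, exactly the logged value and the "1m13.745973515s" E2E duration. Whether the tracker computes it precisely this way is an assumption, but the arithmetic reproduces the logged numbers, as the sketch below shows using the two timestamp strings copied verbatim from that record.

// sloduration.go - reproduce podStartSLOduration for kube-apiserver-crc
// from the two timestamps in its log record.
package main

import (
	"fmt"
	"log"
	"time"
)

func main() {
	// Layout matching the "2025-12-05 12:09:39 +0000 UTC" form in the log;
	// the fractional-second field is optional when absent.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-12-05 12:09:39 +0000 UTC")
	if err != nil {
		log.Fatal(err)
	}
	observed, err := time.Parse(layout, "2025-12-05 12:10:52.745973515 +0000 UTC")
	if err != nil {
		log.Fatal(err)
	}
	d := observed.Sub(created)
	// Prints: podStartSLOduration=73.745973515 (1m13.745973515s)
	fmt.Printf("podStartSLOduration=%.9f (%s)\n", d.Seconds(), d)
}

The firstStartedPulling and lastFinishedPulling fields are the Go zero time in every record here, consistent with images that were already present and never pulled during this startup.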
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.894148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.894188 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.894200 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.894220 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.894230 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.998520 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.998573 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.998589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.998611 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:52 crc kubenswrapper[4711]: I1205 12:10:52.998626 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:52Z","lastTransitionTime":"2025-12-05T12:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.101975 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.102038 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.102051 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.102072 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.102087 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.205281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.205319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.205330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.205351 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.205364 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.308014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.308084 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.308097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.308117 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.308130 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.411287 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.411330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.411344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.411363 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.411379 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.513541 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.513583 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.513595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.513615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.513626 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.616609 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.616666 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.616683 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.616711 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.616729 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.682346 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.682536 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.682412 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:10:53 crc kubenswrapper[4711]: E1205 12:10:53.682648 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:10:53 crc kubenswrapper[4711]: E1205 12:10:53.682898 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:10:53 crc kubenswrapper[4711]: E1205 12:10:53.682966 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.720812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.720866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.720881 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.720901 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.720915 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.824403 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.824470 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.824479 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.824498 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.824510 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.927861 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.927911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.927928 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.927951 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:53 crc kubenswrapper[4711]: I1205 12:10:53.927970 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:53Z","lastTransitionTime":"2025-12-05T12:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.031978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.032043 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.032064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.032091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.032110 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.134471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.134523 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.134534 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.134552 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.134566 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.238324 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.238430 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.238461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.238574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.238606 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.341705 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.341779 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.341803 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.341839 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.341866 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.444278 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.444368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.444438 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.444469 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.444487 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.547016 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.547075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.547090 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.547117 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.547131 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.650102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.650152 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.650162 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.650179 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.650189 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.683237 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb"
Dec 05 12:10:54 crc kubenswrapper[4711]: E1205 12:10:54.683472 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072"
pod="openshift-multus/network-metrics-daemon-hv9gb" podUID="fa5afbc1-d1a4-40c8-990d-72a8169d5072" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.752863 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.752918 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.752929 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.752949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.752965 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.860490 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.860537 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.860549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.860569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.860582 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.963715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.963773 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.963790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.963813 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:54 crc kubenswrapper[4711]: I1205 12:10:54.963829 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:54Z","lastTransitionTime":"2025-12-05T12:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.067720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.067807 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.067820 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.067840 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.067855 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:10:55Z","lastTransitionTime":"2025-12-05T12:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.170813 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.170876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.170892 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.170910 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.171072 4711 kubelet_node_status.go:538] "Fast updating node status as it just became ready"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.217591 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.218149 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6lhz6"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.218548 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.218715 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.225918 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.226333 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.228881 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-plvn8"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.229603 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t5m9v"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.229872 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.230208 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.232904 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.233643 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-k4w4c"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.234545 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c"
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.235473 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.236175 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.236880 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m7fx2"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.237356 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.238177 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.238729 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.238956 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.239095 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.239316 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.239351 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.239611 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.239658 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.239907 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.240272 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.240524 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.240767 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.239618 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.241063 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 12:10:55 crc 
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.241225 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.241289 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.241069 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.241649 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.241859 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.241990 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242082 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242143 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242262 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242315 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242399 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242472 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242541 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242639 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242104 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242487 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.242951 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.243469 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-lrpw8"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.243848 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-lrpw8"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.244110 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.254910 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-2f9kw"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.254932 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.256254 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2f9kw"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.256449 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.257115 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.257827 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.258701 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.259048 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.259502 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.259916 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.260762 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.281981 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.282564 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dmv5p"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.283111 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.283512 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.284574 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.284875 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5"
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.286752 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.286965 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.287098 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.287225 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.287359 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.287532 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.287614 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.287631 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5gn9w"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.287869 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.287953 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.288052 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.288621 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.288624 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.288876 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.292948 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.297509 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.297711 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.297731 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.298829 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.298971 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.299097 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.299181 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.297538 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.298610 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.299356 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.299367 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.298663 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.300610 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.308101 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.298700 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.299600 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.299820 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.303136 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.300446 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sszzb"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.304896 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305187 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305280 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305313 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.323588 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305341 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305371 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305487 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305548 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305579 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305606 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305660 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305901 4711 reflector.go:368] Caches 
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.305976 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.306248 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.306289 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.307521 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.307594 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.326219 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.326369 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.326534 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.327412 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.331099 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.331517 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.333050 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.333119 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.333632 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.333757 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.333835 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.334363 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845"]
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.334501 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td"
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.336474 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.342753 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.342995 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.343160 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.343540 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.343711 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.343841 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.343931 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.344319 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.344409 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.344816 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.344982 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.345320 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.345313 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.345776 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.345913 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.346574 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.346982 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.347058 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.348757 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.349047 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.350373 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.350577 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.351168 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.351724 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.351807 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.355374 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xn94x"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.356114 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.361067 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.361521 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.361919 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.362469 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.363535 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.363562 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.364448 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.369807 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wz6zl"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.370514 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.374215 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.375273 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.381976 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.382110 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.383228 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4stqf"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.384023 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.385313 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.388129 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z9sq9"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.389008 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-sr8f2"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.389083 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.389543 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.389933 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.390039 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.390288 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.390602 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef65fb95-ada6-43cb-8847-aefb174133bb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.390650 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef65fb95-ada6-43cb-8847-aefb174133bb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.390677 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pljkk\" (UniqueName: \"kubernetes.io/projected/ef65fb95-ada6-43cb-8847-aefb174133bb-kube-api-access-pljkk\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.390753 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-plvn8"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.392680 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6lhz6"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.396056 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.398081 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-k4w4c"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.400863 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.400902 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t5m9v"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.401231 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.401814 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.405561 4711 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.406593 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sszzb"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.419421 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-sg7pj"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.421035 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.421146 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-sg7pj" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.425968 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.428504 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xn94x"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.430460 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.432434 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.434711 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5gn9w"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.438891 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.440707 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m7fx2"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.440774 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.442950 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.443460 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-lrpw8"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.445001 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.447729 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wz6zl"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.451794 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.452926 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.454034 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.455158 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4stqf"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.456190 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dmv5p"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.457345 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.458568 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.460224 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z9sq9"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.460494 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.461535 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.462316 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.462537 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-sr8f2"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.463582 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.464570 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-sg7pj"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.465681 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.467027 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-cxbnt"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.467778 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rb2nh"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.468142 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.468900 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-nr7l4"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.469268 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-nr7l4" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.469465 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.469793 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rb2nh"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.470768 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-cxbnt"] Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.480718 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.491205 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef65fb95-ada6-43cb-8847-aefb174133bb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.491253 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pljkk\" (UniqueName: \"kubernetes.io/projected/ef65fb95-ada6-43cb-8847-aefb174133bb-kube-api-access-pljkk\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.491284 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef65fb95-ada6-43cb-8847-aefb174133bb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.492318 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef65fb95-ada6-43cb-8847-aefb174133bb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.501000 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef65fb95-ada6-43cb-8847-aefb174133bb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.505702 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.521226 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.541109 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.561235 4711 
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.581670 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.602463 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.621371 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.642803 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.661353 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.680721 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.682440 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.682457 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.682506 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.701531 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.721270 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.741547 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.761855 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.781472 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.802428 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.822179 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.842294 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.862063 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.881676 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.902193 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.922311 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.942135 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.970156 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 05 12:10:55 crc kubenswrapper[4711]: I1205 12:10:55.981757 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.002262 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.022738 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.047650 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.060843 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.082380 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.102099 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.121315 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.142099 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.161550 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.181200 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.203789 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.221122 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.242208 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.262533 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.281801 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.301671 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.321734 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.342365 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.361806 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.379933 4711 request.go:700] Waited for 1.015107515s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-config-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.381740 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.401740 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.421232 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.442248 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.461663 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.481220 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.502605 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.521082 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.541679 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.562217 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.580782 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.601102 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.622099 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.642097 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.662024 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.681021 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.682195 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb"
Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.701438 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.729563 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.741460 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.761215 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.781019 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.804200 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.820963 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.845206 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.861566 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.882331 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.901487 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.920871 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.941618 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.969454 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 12:10:56 crc kubenswrapper[4711]: I1205 12:10:56.981898 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.001737 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.022222 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.042103 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.061740 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 
12:10:57.082507 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112344 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-stats-auth\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112446 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stsln\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-kube-api-access-stsln\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112475 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-etcd-client\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112496 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5514b5e-1f9a-42b8-8db2-56b5cb965f97-metrics-tls\") pod \"dns-operator-744455d44c-5gn9w\" (UID: \"e5514b5e-1f9a-42b8-8db2-56b5cb965f97\") " pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112678 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb7xw\" (UniqueName: \"kubernetes.io/projected/e5514b5e-1f9a-42b8-8db2-56b5cb965f97-kube-api-access-cb7xw\") pod \"dns-operator-744455d44c-5gn9w\" (UID: \"e5514b5e-1f9a-42b8-8db2-56b5cb965f97\") " pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112711 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112733 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c4e1e97-a3ae-414f-b12f-8b2463478934-config\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112758 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112781 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-config\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.112836 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4dc833de-df68-4794-9093-5698c85833cc-audit-dir\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113006 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlw4g\" (UniqueName: \"kubernetes.io/projected/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-kube-api-access-tlw4g\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113048 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r95d\" (UniqueName: \"kubernetes.io/projected/564b439c-0ac6-43d5-afa8-2379ea73a71e-kube-api-access-2r95d\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113072 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d0e064-79f2-4614-92ac-3a4d44cdbe92-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113098 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113123 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-audit-policies\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113144 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sblv\" (UniqueName: \"kubernetes.io/projected/51156375-440f-4e82-8fac-dea612cb45ed-kube-api-access-4sblv\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113166 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113203 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/7c4e1e97-a3ae-414f-b12f-8b2463478934-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113239 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm5dw\" (UniqueName: \"kubernetes.io/projected/04563888-6e73-437a-99b3-9dfa5662ff33-kube-api-access-zm5dw\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113274 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113305 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4nfp\" (UniqueName: \"kubernetes.io/projected/d454c199-9738-4862-a556-094087bde5b1-kube-api-access-c4nfp\") pod \"downloads-7954f5f757-lrpw8\" (UID: \"d454c199-9738-4862-a556-094087bde5b1\") " pod="openshift-console/downloads-7954f5f757-lrpw8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113345 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-registry-certificates\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.113415 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:57.613359627 +0000 UTC m=+103.197682157 (durationBeforeRetry 500ms). 
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113479 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113515 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113540 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3dbd950-893e-4a2f-866f-8c538e7371e0-audit-dir\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113588 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8vqv\" (UniqueName: \"kubernetes.io/projected/06f3328e-2709-4545-8e34-a253878a788a-kube-api-access-l8vqv\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113616 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ql8p\" (UniqueName: \"kubernetes.io/projected/b3dbd950-893e-4a2f-866f-8c538e7371e0-kube-api-access-5ql8p\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113644 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7c4e1e97-a3ae-414f-b12f-8b2463478934-images\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113666 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04563888-6e73-437a-99b3-9dfa5662ff33-serving-cert\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113694 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113720 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51156375-440f-4e82-8fac-dea612cb45ed-audit-dir\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113745 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113774 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-encryption-config\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113807 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113850 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-registry-tls\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113900 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmm47\" (UniqueName: \"kubernetes.io/projected/39063f31-28f4-4464-b29e-2589d64907bc-kube-api-access-xmm47\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113946 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.113963 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbhmt\" (UniqueName: \"kubernetes.io/projected/0c96a75b-8c13-4da0-abcb-95855f1fbac5-kube-api-access-tbhmt\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114010 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-default-certificate\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114047 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5r9n\" (UniqueName: \"kubernetes.io/projected/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-kube-api-access-d5r9n\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114075 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114104 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9727afba-153c-4f79-9101-ccbd7e497ebe-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114127 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b3dbd950-893e-4a2f-866f-8c538e7371e0-node-pullsecrets\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114171 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-encryption-config\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114194 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/06f3328e-2709-4545-8e34-a253878a788a-machine-approver-tls\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114216 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-serving-cert\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114243 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-config\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114268 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-trusted-ca\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114302 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/44d0e064-79f2-4614-92ac-3a4d44cdbe92-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114331 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8v8f\" (UniqueName: \"kubernetes.io/projected/7c4e1e97-a3ae-414f-b12f-8b2463478934-kube-api-access-s8v8f\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114353 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/564b439c-0ac6-43d5-afa8-2379ea73a71e-config-volume\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114375 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/44d0e064-79f2-4614-92ac-3a4d44cdbe92-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114458 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114480 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-image-import-ca\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114514 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114542 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06f3328e-2709-4545-8e34-a253878a788a-config\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114570 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/564b439c-0ac6-43d5-afa8-2379ea73a71e-secret-volume\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114589 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9727afba-153c-4f79-9101-ccbd7e497ebe-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114606 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-client-ca\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114638 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0206d524-6952-4adb-a5a6-2dc7fc8a60a5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sszzb\" (UID: \"0206d524-6952-4adb-a5a6-2dc7fc8a60a5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114665 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtphk\" (UniqueName: \"kubernetes.io/projected/0206d524-6952-4adb-a5a6-2dc7fc8a60a5-kube-api-access-qtphk\") pod \"multus-admission-controller-857f4d67dd-sszzb\" (UID: \"0206d524-6952-4adb-a5a6-2dc7fc8a60a5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114689 4711 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-serving-cert\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114714 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114737 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/06f3328e-2709-4545-8e34-a253878a788a-auth-proxy-config\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114758 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114780 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7sf7\" (UniqueName: \"kubernetes.io/projected/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-kube-api-access-t7sf7\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114806 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9727afba-153c-4f79-9101-ccbd7e497ebe-config\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114830 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-audit-policies\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.114867 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/44d0e064-79f2-4614-92ac-3a4d44cdbe92-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc 
kubenswrapper[4711]: I1205 12:10:57.114914 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-bound-sa-token\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115008 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-config\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115027 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115043 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-srv-cert\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115098 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-metrics-certs\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115115 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115170 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/259a4e64-67b8-417e-8948-4cc028bb728d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115193 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/259a4e64-67b8-417e-8948-4cc028bb728d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115226 4711 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115245 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-etcd-serving-ca\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115304 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-client-ca\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115359 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115426 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39063f31-28f4-4464-b29e-2589d64907bc-service-ca-bundle\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115455 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/44d0e064-79f2-4614-92ac-3a4d44cdbe92-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115506 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ltkp\" (UniqueName: \"kubernetes.io/projected/4dc833de-df68-4794-9093-5698c85833cc-kube-api-access-7ltkp\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115526 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 
12:10:57.115547 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-audit\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115566 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-etcd-client\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.115583 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-serving-cert\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.121609 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.141857 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.160968 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.181676 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.216685 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.216908 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4b80150-d70c-4bea-a687-46b452b82f6d-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.216932 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/355f45bc-83fe-49ab-844e-f91a22b8f6fb-apiservice-cert\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.216952 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-config\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" (UID: 
\"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.217119 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:57.717094106 +0000 UTC m=+103.301416436 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217143 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217176 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r95d\" (UniqueName: \"kubernetes.io/projected/564b439c-0ac6-43d5-afa8-2379ea73a71e-kube-api-access-2r95d\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217217 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4b80150-d70c-4bea-a687-46b452b82f6d-proxy-tls\") pod \"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217241 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg7dr\" (UniqueName: \"kubernetes.io/projected/39bf8b25-13ec-4cf0-ba60-6159836e8f09-kube-api-access-mg7dr\") pod \"package-server-manager-789f6589d5-q4s7j\" (UID: \"39bf8b25-13ec-4cf0-ba60-6159836e8f09\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217262 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-audit-policies\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217313 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-config\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217348 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/50f42111-4c58-408b-b2be-f739d494ef28-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xvdpr\" (UID: \"50f42111-4c58-408b-b2be-f739d494ef28\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217369 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssxrd\" (UniqueName: \"kubernetes.io/projected/56f640a1-033c-4c5f-b8f7-70bad15eaed3-kube-api-access-ssxrd\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217445 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/08003b99-9e38-4d7d-a60f-2397dda4f9a6-srv-cert\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217468 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-metrics-tls\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217489 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjzxl\" (UniqueName: \"kubernetes.io/projected/ebcee70b-d418-4bad-b7df-0c46f8cd7422-kube-api-access-fjzxl\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217544 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217565 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a64a971-347f-47da-8e47-1443b9ffebcc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217593 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" 
(UID: \"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217615 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7c4e1e97-a3ae-414f-b12f-8b2463478934-images\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217640 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56f640a1-033c-4c5f-b8f7-70bad15eaed3-serving-cert\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217663 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-encryption-config\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217685 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217708 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b31df7a1-216b-4579-ab74-96986ec8cb1d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217730 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217762 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/540b9374-4012-4532-9d68-ef686b30ba78-config-volume\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217787 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbhmt\" (UniqueName: \"kubernetes.io/projected/0c96a75b-8c13-4da0-abcb-95855f1fbac5-kube-api-access-tbhmt\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217813 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-default-certificate\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217841 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-trusted-ca\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217867 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7ch5\" (UniqueName: \"kubernetes.io/projected/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-kube-api-access-k7ch5\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217892 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a64a971-347f-47da-8e47-1443b9ffebcc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217915 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-csi-data-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217939 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tkzn\" (UniqueName: \"kubernetes.io/projected/e2342447-c2fa-4e43-8a78-acde9e70782a-kube-api-access-9tkzn\") pod \"migrator-59844c95c7-z4kxc\" (UID: \"e2342447-c2fa-4e43-8a78-acde9e70782a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217957 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90c8c074-7442-4155-a948-817b93fec221-cert\") pod \"ingress-canary-sg7pj\" (UID: \"90c8c074-7442-4155-a948-817b93fec221\") " pod="openshift-ingress-canary/ingress-canary-sg7pj" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.217981 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3845ed2f-d607-4ba6-9d74-5293cfa722fd-config\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 
12:10:57.218000 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebcee70b-d418-4bad-b7df-0c46f8cd7422-config\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218034 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-serving-cert\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218054 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4b27d33-ae80-497b-adc2-625f11662e4d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218084 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p4cm\" (UniqueName: \"kubernetes.io/projected/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-kube-api-access-6p4cm\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218108 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/44d0e064-79f2-4614-92ac-3a4d44cdbe92-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218141 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-image-import-ca\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218164 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8v8f\" (UniqueName: \"kubernetes.io/projected/7c4e1e97-a3ae-414f-b12f-8b2463478934-kube-api-access-s8v8f\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218185 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/564b439c-0ac6-43d5-afa8-2379ea73a71e-config-volume\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218207 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-trusted-ca\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218230 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/450b40c8-78a2-4ac7-97ba-56b2b165c0eb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-55845\" (UID: \"450b40c8-78a2-4ac7-97ba-56b2b165c0eb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218255 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9727afba-153c-4f79-9101-ccbd7e497ebe-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218275 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218298 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/564b439c-0ac6-43d5-afa8-2379ea73a71e-secret-volume\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218319 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-serving-cert\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218342 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0206d524-6952-4adb-a5a6-2dc7fc8a60a5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sszzb\" (UID: \"0206d524-6952-4adb-a5a6-2dc7fc8a60a5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.218430 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtphk\" (UniqueName: \"kubernetes.io/projected/0206d524-6952-4adb-a5a6-2dc7fc8a60a5-kube-api-access-qtphk\") pod \"multus-admission-controller-857f4d67dd-sszzb\" (UID: \"0206d524-6952-4adb-a5a6-2dc7fc8a60a5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.220420 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-trusted-ca\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: 
\"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.220547 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-audit-policies\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.222009 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-image-import-ca\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.222959 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/564b439c-0ac6-43d5-afa8-2379ea73a71e-config-volume\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.223404 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-encryption-config\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.223540 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.223681 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/44d0e064-79f2-4614-92ac-3a4d44cdbe92-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.223951 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnrsf\" (UniqueName: \"kubernetes.io/projected/df146046-34ea-410b-a342-83bc374306d1-kube-api-access-mnrsf\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.223984 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-default-certificate\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224004 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9727afba-153c-4f79-9101-ccbd7e497ebe-config\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224036 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224117 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-289ml\" (UniqueName: \"kubernetes.io/projected/3845ed2f-d607-4ba6-9d74-5293cfa722fd-kube-api-access-289ml\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224118 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224154 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b31df7a1-216b-4579-ab74-96986ec8cb1d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224222 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-config\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224262 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-metrics-certs\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224285 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mspc\" (UniqueName: \"kubernetes.io/projected/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-kube-api-access-7mspc\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224334 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224354 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-srv-cert\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224426 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224446 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-oauth-serving-cert\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224463 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-socket-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224487 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/259a4e64-67b8-417e-8948-4cc028bb728d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224505 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-oauth-config\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224815 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.224816 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7c4e1e97-a3ae-414f-b12f-8b2463478934-images\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225010 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225516 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/259a4e64-67b8-417e-8948-4cc028bb728d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225593 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-client-ca\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225623 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-etcd-serving-ca\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225694 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-auth-proxy-config\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225738 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75tn8\" (UniqueName: \"kubernetes.io/projected/540b9374-4012-4532-9d68-ef686b30ba78-kube-api-access-75tn8\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225769 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225826 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-etcd-client\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225855 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" 
(UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-stats-auth\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225882 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-service-ca\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225905 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-client\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225929 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-etcd-client\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225953 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5514b5e-1f9a-42b8-8db2-56b5cb965f97-metrics-tls\") pod \"dns-operator-744455d44c-5gn9w\" (UID: \"e5514b5e-1f9a-42b8-8db2-56b5cb965f97\") " pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.225982 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj8df\" (UniqueName: \"kubernetes.io/projected/b31df7a1-216b-4579-ab74-96986ec8cb1d-kube-api-access-fj8df\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.226023 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebcee70b-d418-4bad-b7df-0c46f8cd7422-serving-cert\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.226265 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9727afba-153c-4f79-9101-ccbd7e497ebe-config\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.226437 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.226528 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.228273 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0206d524-6952-4adb-a5a6-2dc7fc8a60a5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sszzb\" (UID: \"0206d524-6952-4adb-a5a6-2dc7fc8a60a5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.228312 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-serving-cert\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.228670 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/259a4e64-67b8-417e-8948-4cc028bb728d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229179 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-etcd-serving-ca\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229262 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c4e1e97-a3ae-414f-b12f-8b2463478934-config\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229744 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-srv-cert\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229800 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-config\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229837 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/08003b99-9e38-4d7d-a60f-2397dda4f9a6-profile-collector-cert\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229890 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c4e1e97-a3ae-414f-b12f-8b2463478934-config\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229892 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4dc833de-df68-4794-9093-5698c85833cc-audit-dir\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229923 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4dc833de-df68-4794-9093-5698c85833cc-audit-dir\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229927 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlw4g\" (UniqueName: \"kubernetes.io/projected/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-kube-api-access-tlw4g\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229965 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pvr2\" (UniqueName: \"kubernetes.io/projected/7e280918-a8ee-40bf-84b3-6c2ee464003f-kube-api-access-8pvr2\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.229987 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtsdh\" (UniqueName: \"kubernetes.io/projected/1415693c-1ea2-42b6-8e90-5bebbb33db90-kube-api-access-gtsdh\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d0e064-79f2-4614-92ac-3a4d44cdbe92-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230205 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-metrics-certs\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230214 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" 
(UniqueName: \"kubernetes.io/secret/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-certs\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230273 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230307 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sblv\" (UniqueName: \"kubernetes.io/projected/51156375-440f-4e82-8fac-dea612cb45ed-kube-api-access-4sblv\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230330 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230377 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/7c4e1e97-a3ae-414f-b12f-8b2463478934-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230474 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13866508-4ae9-427a-a2c8-5444fe01b9d2-serving-cert\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230493 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1415693c-1ea2-42b6-8e90-5bebbb33db90-signing-cabundle\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230512 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-registry-certificates\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230534 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm5dw\" (UniqueName: \"kubernetes.io/projected/04563888-6e73-437a-99b3-9dfa5662ff33-kube-api-access-zm5dw\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: 
\"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230620 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230642 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4nfp\" (UniqueName: \"kubernetes.io/projected/d454c199-9738-4862-a556-094087bde5b1-kube-api-access-c4nfp\") pod \"downloads-7954f5f757-lrpw8\" (UID: \"d454c199-9738-4862-a556-094087bde5b1\") " pod="openshift-console/downloads-7954f5f757-lrpw8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230663 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ebcee70b-d418-4bad-b7df-0c46f8cd7422-trusted-ca\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230683 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230702 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3dbd950-893e-4a2f-866f-8c538e7371e0-audit-dir\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230727 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/540b9374-4012-4532-9d68-ef686b30ba78-metrics-tls\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230754 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8vqv\" (UniqueName: \"kubernetes.io/projected/06f3328e-2709-4545-8e34-a253878a788a-kube-api-access-l8vqv\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230779 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-service-ca\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230805 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3845ed2f-d607-4ba6-9d74-5293cfa722fd-serving-cert\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230834 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04563888-6e73-437a-99b3-9dfa5662ff33-serving-cert\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230867 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230890 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51156375-440f-4e82-8fac-dea612cb45ed-audit-dir\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230898 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230918 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ql8p\" (UniqueName: \"kubernetes.io/projected/b3dbd950-893e-4a2f-866f-8c538e7371e0-kube-api-access-5ql8p\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230946 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbrvn\" (UniqueName: \"kubernetes.io/projected/a4b80150-d70c-4bea-a687-46b452b82f6d-kube-api-access-rbrvn\") pod \"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.230970 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1415693c-1ea2-42b6-8e90-5bebbb33db90-signing-key\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231011 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1a64a971-347f-47da-8e47-1443b9ffebcc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231038 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-registry-tls\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231062 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmm47\" (UniqueName: \"kubernetes.io/projected/39063f31-28f4-4464-b29e-2589d64907bc-kube-api-access-xmm47\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231090 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231116 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" (UID: \"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231148 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5r9n\" (UniqueName: \"kubernetes.io/projected/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-kube-api-access-d5r9n\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231177 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-proxy-tls\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231202 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231251 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/564b439c-0ac6-43d5-afa8-2379ea73a71e-secret-volume\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231338 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51156375-440f-4e82-8fac-dea612cb45ed-audit-dir\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231369 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/259a4e64-67b8-417e-8948-4cc028bb728d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231437 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/355f45bc-83fe-49ab-844e-f91a22b8f6fb-tmpfs\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231463 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231481 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-service-ca-bundle\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231500 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9727afba-153c-4f79-9101-ccbd7e497ebe-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231519 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b3dbd950-893e-4a2f-866f-8c538e7371e0-node-pullsecrets\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231529 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231537 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9znl\" (UniqueName: \"kubernetes.io/projected/355f45bc-83fe-49ab-844e-f91a22b8f6fb-kube-api-access-w9znl\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231603 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-config\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231635 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-encryption-config\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231668 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/06f3328e-2709-4545-8e34-a253878a788a-machine-approver-tls\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231700 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231736 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh9mq\" (UniqueName: \"kubernetes.io/projected/13866508-4ae9-427a-a2c8-5444fe01b9d2-kube-api-access-wh9mq\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231765 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b31df7a1-216b-4579-ab74-96986ec8cb1d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231794 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/355f45bc-83fe-49ab-844e-f91a22b8f6fb-webhook-cert\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231830 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/44d0e064-79f2-4614-92ac-3a4d44cdbe92-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231935 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-config\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231961 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-client-ca\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.231990 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06f3328e-2709-4545-8e34-a253878a788a-config\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232019 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e280918-a8ee-40bf-84b3-6c2ee464003f-serving-cert\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232045 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtt5z\" (UniqueName: \"kubernetes.io/projected/08003b99-9e38-4d7d-a60f-2397dda4f9a6-kube-api-access-mtt5z\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232071 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-node-bootstrap-token\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232099 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-mountpoint-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232114 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232121 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-plugins-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232145 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3dbd950-893e-4a2f-866f-8c538e7371e0-audit-dir\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232155 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7sf7\" (UniqueName: \"kubernetes.io/projected/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-kube-api-access-t7sf7\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232182 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4b27d33-ae80-497b-adc2-625f11662e4d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232211 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/06f3328e-2709-4545-8e34-a253878a788a-auth-proxy-config\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232239 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232268 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/44d0e064-79f2-4614-92ac-3a4d44cdbe92-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232292 4711 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-audit-policies\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232316 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-serving-cert\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232338 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-registration-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232375 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-bound-sa-token\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232399 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232422 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-config\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232837 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5514b5e-1f9a-42b8-8db2-56b5cb965f97-metrics-tls\") pod \"dns-operator-744455d44c-5gn9w\" (UID: \"e5514b5e-1f9a-42b8-8db2-56b5cb965f97\") " pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.232942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.233139 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-serving-cert\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc 
kubenswrapper[4711]: I1205 12:10:57.233292 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d0e064-79f2-4614-92ac-3a4d44cdbe92-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234673 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234711 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/56f640a1-033c-4c5f-b8f7-70bad15eaed3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234762 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-trusted-ca-bundle\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234792 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4z29\" (UniqueName: \"kubernetes.io/projected/90c8c074-7442-4155-a948-817b93fec221-kube-api-access-c4z29\") pod \"ingress-canary-sg7pj\" (UID: \"90c8c074-7442-4155-a948-817b93fec221\") " pod="openshift-ingress-canary/ingress-canary-sg7pj" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234820 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234847 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39063f31-28f4-4464-b29e-2589d64907bc-service-ca-bundle\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234877 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8b6z\" (UniqueName: \"kubernetes.io/projected/450b40c8-78a2-4ac7-97ba-56b2b165c0eb-kube-api-access-c8b6z\") pod \"cluster-samples-operator-665b6dd947-55845\" (UID: \"450b40c8-78a2-4ac7-97ba-56b2b165c0eb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234906 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/44d0e064-79f2-4614-92ac-3a4d44cdbe92-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234934 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234956 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b3dbd950-893e-4a2f-866f-8c538e7371e0-node-pullsecrets\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234964 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ltkp\" (UniqueName: \"kubernetes.io/projected/4dc833de-df68-4794-9093-5698c85833cc-kube-api-access-7ltkp\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.234990 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-audit\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235017 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-serving-cert\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235042 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66h5k\" (UniqueName: \"kubernetes.io/projected/e4b27d33-ae80-497b-adc2-625f11662e4d-kube-api-access-66h5k\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235069 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stsln\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-kube-api-access-stsln\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb7xw\" (UniqueName: 
\"kubernetes.io/projected/e5514b5e-1f9a-42b8-8db2-56b5cb965f97-kube-api-access-cb7xw\") pod \"dns-operator-744455d44c-5gn9w\" (UID: \"e5514b5e-1f9a-42b8-8db2-56b5cb965f97\") " pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235128 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235155 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-ca\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235183 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s9xv\" (UniqueName: \"kubernetes.io/projected/50f42111-4c58-408b-b2be-f739d494ef28-kube-api-access-6s9xv\") pod \"control-plane-machine-set-operator-78cbb6b69f-xvdpr\" (UID: \"50f42111-4c58-408b-b2be-f739d494ef28\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235217 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235247 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-config\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235271 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqqwc\" (UniqueName: \"kubernetes.io/projected/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-kube-api-access-gqqwc\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235270 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235762 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-registry-certificates\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.235901 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-client-ca\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.236297 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-config\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.236596 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-client-ca\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.237063 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06f3328e-2709-4545-8e34-a253878a788a-config\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.237743 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.238490 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/44d0e064-79f2-4614-92ac-3a4d44cdbe92-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.238919 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/06f3328e-2709-4545-8e34-a253878a788a-auth-proxy-config\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.239559 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b3dbd950-893e-4a2f-866f-8c538e7371e0-audit\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.239554 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39063f31-28f4-4464-b29e-2589d64907bc-service-ca-bundle\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.239612 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.239632 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/44d0e064-79f2-4614-92ac-3a4d44cdbe92-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.239669 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.240134 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/39063f31-28f4-4464-b29e-2589d64907bc-stats-auth\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.240136 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51156375-440f-4e82-8fac-dea612cb45ed-audit-policies\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.240349 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/39bf8b25-13ec-4cf0-ba60-6159836e8f09-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-q4s7j\" (UID: \"39bf8b25-13ec-4cf0-ba60-6159836e8f09\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.240507 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-images\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.240528 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.240668 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/51156375-440f-4e82-8fac-dea612cb45ed-etcd-client\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.240706 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-config\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.240795 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:57.740775138 +0000 UTC m=+103.325097678 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.241865 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-etcd-client\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.242086 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.242467 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-encryption-config\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.243826 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3dbd950-893e-4a2f-866f-8c538e7371e0-serving-cert\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.243874 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-registry-tls\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.244240 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.244440 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.244909 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9727afba-153c-4f79-9101-ccbd7e497ebe-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.244955 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/06f3328e-2709-4545-8e34-a253878a788a-machine-approver-tls\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.245072 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/7c4e1e97-a3ae-414f-b12f-8b2463478934-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.245698 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04563888-6e73-437a-99b3-9dfa5662ff33-serving-cert\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.245898 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.246012 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:10:57 crc 
kubenswrapper[4711]: I1205 12:10:57.262109 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.281900 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.301926 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.321714 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.341341 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.341552 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-oauth-config\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.341582 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-auth-proxy-config\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.341674 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:57.841625607 +0000 UTC m=+103.425947937 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.341763 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75tn8\" (UniqueName: \"kubernetes.io/projected/540b9374-4012-4532-9d68-ef686b30ba78-kube-api-access-75tn8\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.341817 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-service-ca\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.341858 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-client\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.341935 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj8df\" (UniqueName: \"kubernetes.io/projected/b31df7a1-216b-4579-ab74-96986ec8cb1d-kube-api-access-fj8df\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.341965 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebcee70b-d418-4bad-b7df-0c46f8cd7422-serving-cert\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342006 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/08003b99-9e38-4d7d-a60f-2397dda4f9a6-profile-collector-cert\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342043 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pvr2\" (UniqueName: \"kubernetes.io/projected/7e280918-a8ee-40bf-84b3-6c2ee464003f-kube-api-access-8pvr2\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342074 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtsdh\" (UniqueName: 
\"kubernetes.io/projected/1415693c-1ea2-42b6-8e90-5bebbb33db90-kube-api-access-gtsdh\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342123 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-certs\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342145 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342159 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342179 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13866508-4ae9-427a-a2c8-5444fe01b9d2-serving-cert\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342199 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1415693c-1ea2-42b6-8e90-5bebbb33db90-signing-cabundle\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342233 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ebcee70b-d418-4bad-b7df-0c46f8cd7422-trusted-ca\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342268 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/540b9374-4012-4532-9d68-ef686b30ba78-metrics-tls\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342302 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-service-ca\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342368 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3845ed2f-d607-4ba6-9d74-5293cfa722fd-serving-cert\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342438 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbrvn\" (UniqueName: \"kubernetes.io/projected/a4b80150-d70c-4bea-a687-46b452b82f6d-kube-api-access-rbrvn\") pod \"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342462 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1415693c-1ea2-42b6-8e90-5bebbb33db90-signing-key\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342503 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a64a971-347f-47da-8e47-1443b9ffebcc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.342270 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-auth-proxy-config\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343010 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-service-ca\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343693 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1415693c-1ea2-42b6-8e90-5bebbb33db90-signing-cabundle\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343772 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" (UID: \"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343813 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-proxy-tls\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343837 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/355f45bc-83fe-49ab-844e-f91a22b8f6fb-tmpfs\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343861 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-service-ca-bundle\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343884 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9znl\" (UniqueName: \"kubernetes.io/projected/355f45bc-83fe-49ab-844e-f91a22b8f6fb-kube-api-access-w9znl\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343913 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh9mq\" (UniqueName: \"kubernetes.io/projected/13866508-4ae9-427a-a2c8-5444fe01b9d2-kube-api-access-wh9mq\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343934 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b31df7a1-216b-4579-ab74-96986ec8cb1d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343958 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/355f45bc-83fe-49ab-844e-f91a22b8f6fb-webhook-cert\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.343997 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e280918-a8ee-40bf-84b3-6c2ee464003f-serving-cert\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344017 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtt5z\" (UniqueName: \"kubernetes.io/projected/08003b99-9e38-4d7d-a60f-2397dda4f9a6-kube-api-access-mtt5z\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344040 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-node-bootstrap-token\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344064 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-mountpoint-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344085 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-plugins-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344119 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4b27d33-ae80-497b-adc2-625f11662e4d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344150 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-serving-cert\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344163 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-service-ca\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344175 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-registration-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344264 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/56f640a1-033c-4c5f-b8f7-70bad15eaed3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344293 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-trusted-ca-bundle\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344323 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-c4z29\" (UniqueName: \"kubernetes.io/projected/90c8c074-7442-4155-a948-817b93fec221-kube-api-access-c4z29\") pod \"ingress-canary-sg7pj\" (UID: \"90c8c074-7442-4155-a948-817b93fec221\") " pod="openshift-ingress-canary/ingress-canary-sg7pj" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344351 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8b6z\" (UniqueName: \"kubernetes.io/projected/450b40c8-78a2-4ac7-97ba-56b2b165c0eb-kube-api-access-c8b6z\") pod \"cluster-samples-operator-665b6dd947-55845\" (UID: \"450b40c8-78a2-4ac7-97ba-56b2b165c0eb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344349 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/355f45bc-83fe-49ab-844e-f91a22b8f6fb-tmpfs\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344377 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344443 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66h5k\" (UniqueName: \"kubernetes.io/projected/e4b27d33-ae80-497b-adc2-625f11662e4d-kube-api-access-66h5k\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344481 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-ca\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344513 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s9xv\" (UniqueName: \"kubernetes.io/projected/50f42111-4c58-408b-b2be-f739d494ef28-kube-api-access-6s9xv\") pod \"control-plane-machine-set-operator-78cbb6b69f-xvdpr\" (UID: \"50f42111-4c58-408b-b2be-f739d494ef28\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344546 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344559 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-registration-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344568 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-config\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344608 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqqwc\" (UniqueName: \"kubernetes.io/projected/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-kube-api-access-gqqwc\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344643 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/39bf8b25-13ec-4cf0-ba60-6159836e8f09-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-q4s7j\" (UID: \"39bf8b25-13ec-4cf0-ba60-6159836e8f09\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344669 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-images\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344693 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4b80150-d70c-4bea-a687-46b452b82f6d-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344715 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/355f45bc-83fe-49ab-844e-f91a22b8f6fb-apiservice-cert\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344735 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-service-ca-bundle\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344762 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-mountpoint-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: 
\"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344739 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-config\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" (UID: \"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344822 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4b80150-d70c-4bea-a687-46b452b82f6d-proxy-tls\") pod \"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344846 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg7dr\" (UniqueName: \"kubernetes.io/projected/39bf8b25-13ec-4cf0-ba60-6159836e8f09-kube-api-access-mg7dr\") pod \"package-server-manager-789f6589d5-q4s7j\" (UID: \"39bf8b25-13ec-4cf0-ba60-6159836e8f09\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344890 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-config\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344917 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/50f42111-4c58-408b-b2be-f739d494ef28-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xvdpr\" (UID: \"50f42111-4c58-408b-b2be-f739d494ef28\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344942 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssxrd\" (UniqueName: \"kubernetes.io/projected/56f640a1-033c-4c5f-b8f7-70bad15eaed3-kube-api-access-ssxrd\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344963 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/08003b99-9e38-4d7d-a60f-2397dda4f9a6-srv-cert\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344983 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-metrics-tls\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345004 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjzxl\" (UniqueName: \"kubernetes.io/projected/ebcee70b-d418-4bad-b7df-0c46f8cd7422-kube-api-access-fjzxl\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345028 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a64a971-347f-47da-8e47-1443b9ffebcc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345053 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" (UID: \"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345083 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56f640a1-033c-4c5f-b8f7-70bad15eaed3-serving-cert\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345108 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b31df7a1-216b-4579-ab74-96986ec8cb1d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345156 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/540b9374-4012-4532-9d68-ef686b30ba78-config-volume\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345209 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-trusted-ca\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345236 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7ch5\" (UniqueName: \"kubernetes.io/projected/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-kube-api-access-k7ch5\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345265 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a64a971-347f-47da-8e47-1443b9ffebcc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345288 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-csi-data-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345319 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tkzn\" (UniqueName: \"kubernetes.io/projected/e2342447-c2fa-4e43-8a78-acde9e70782a-kube-api-access-9tkzn\") pod \"migrator-59844c95c7-z4kxc\" (UID: \"e2342447-c2fa-4e43-8a78-acde9e70782a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345341 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90c8c074-7442-4155-a948-817b93fec221-cert\") pod \"ingress-canary-sg7pj\" (UID: \"90c8c074-7442-4155-a948-817b93fec221\") " pod="openshift-ingress-canary/ingress-canary-sg7pj" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345374 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3845ed2f-d607-4ba6-9d74-5293cfa722fd-config\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345417 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebcee70b-d418-4bad-b7df-0c46f8cd7422-config\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345456 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4b27d33-ae80-497b-adc2-625f11662e4d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345479 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p4cm\" (UniqueName: \"kubernetes.io/projected/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-kube-api-access-6p4cm\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345518 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/450b40c8-78a2-4ac7-97ba-56b2b165c0eb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-55845\" (UID: 
\"450b40c8-78a2-4ac7-97ba-56b2b165c0eb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345560 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnrsf\" (UniqueName: \"kubernetes.io/projected/df146046-34ea-410b-a342-83bc374306d1-kube-api-access-mnrsf\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345587 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-289ml\" (UniqueName: \"kubernetes.io/projected/3845ed2f-d607-4ba6-9d74-5293cfa722fd-kube-api-access-289ml\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345613 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b31df7a1-216b-4579-ab74-96986ec8cb1d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345635 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-config\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345663 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mspc\" (UniqueName: \"kubernetes.io/projected/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-kube-api-access-7mspc\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345690 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-oauth-serving-cert\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345711 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-socket-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345763 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-config\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" (UID: \"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.345868 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-ca\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.346125 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.346156 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/08003b99-9e38-4d7d-a60f-2397dda4f9a6-profile-collector-cert\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.346280 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-config\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.346415 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-images\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.346521 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7e280918-a8ee-40bf-84b3-6c2ee464003f-etcd-client\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.346563 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-trusted-ca-bundle\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.344713 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ebcee70b-d418-4bad-b7df-0c46f8cd7422-trusted-ca\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.346691 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:57.84667297 +0000 UTC m=+103.430995400 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.346692 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/56f640a1-033c-4c5f-b8f7-70bad15eaed3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.346900 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-plugins-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.347102 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4b27d33-ae80-497b-adc2-625f11662e4d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.347922 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-csi-data-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.347959 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3845ed2f-d607-4ba6-9d74-5293cfa722fd-config\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.348137 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-trusted-ca\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.348660 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebcee70b-d418-4bad-b7df-0c46f8cd7422-config\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.349015 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4b80150-d70c-4bea-a687-46b452b82f6d-mcc-auth-proxy-config\") pod 
\"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.349837 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13866508-4ae9-427a-a2c8-5444fe01b9d2-config\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.349850 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3845ed2f-d607-4ba6-9d74-5293cfa722fd-serving-cert\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.350256 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13866508-4ae9-427a-a2c8-5444fe01b9d2-serving-cert\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.350573 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-proxy-tls\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.351115 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebcee70b-d418-4bad-b7df-0c46f8cd7422-serving-cert\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.351640 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/df146046-34ea-410b-a342-83bc374306d1-socket-dir\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.352236 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a64a971-347f-47da-8e47-1443b9ffebcc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.351692 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/540b9374-4012-4532-9d68-ef686b30ba78-config-volume\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.352605 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/450b40c8-78a2-4ac7-97ba-56b2b165c0eb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-55845\" (UID: \"450b40c8-78a2-4ac7-97ba-56b2b165c0eb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.352676 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/355f45bc-83fe-49ab-844e-f91a22b8f6fb-webhook-cert\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.352866 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-oauth-serving-cert\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.352937 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b31df7a1-216b-4579-ab74-96986ec8cb1d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.353044 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/355f45bc-83fe-49ab-844e-f91a22b8f6fb-apiservice-cert\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.353048 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e280918-a8ee-40bf-84b3-6c2ee464003f-config\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.353501 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/540b9374-4012-4532-9d68-ef686b30ba78-metrics-tls\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.353603 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/39bf8b25-13ec-4cf0-ba60-6159836e8f09-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-q4s7j\" (UID: \"39bf8b25-13ec-4cf0-ba60-6159836e8f09\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.354155 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4b80150-d70c-4bea-a687-46b452b82f6d-proxy-tls\") pod \"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" 
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.354883 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-serving-cert\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.355182 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" (UID: \"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.355923 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4b27d33-ae80-497b-adc2-625f11662e4d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.356039 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-oauth-config\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.356066 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56f640a1-033c-4c5f-b8f7-70bad15eaed3-serving-cert\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.356669 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b31df7a1-216b-4579-ab74-96986ec8cb1d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.356748 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1415693c-1ea2-42b6-8e90-5bebbb33db90-signing-key\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.357049 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-certs\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.357578 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e280918-a8ee-40bf-84b3-6c2ee464003f-serving-cert\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.358054 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/50f42111-4c58-408b-b2be-f739d494ef28-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xvdpr\" (UID: \"50f42111-4c58-408b-b2be-f739d494ef28\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.358536 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a64a971-347f-47da-8e47-1443b9ffebcc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.358616 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90c8c074-7442-4155-a948-817b93fec221-cert\") pod \"ingress-canary-sg7pj\" (UID: \"90c8c074-7442-4155-a948-817b93fec221\") " pod="openshift-ingress-canary/ingress-canary-sg7pj"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.358742 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-node-bootstrap-token\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.360179 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/08003b99-9e38-4d7d-a60f-2397dda4f9a6-srv-cert\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.361609 4711 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.368318 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-metrics-tls\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.381497 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.399905 4711 request.go:700] Waited for 1.907885753s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager-operator/serviceaccounts/openshift-controller-manager-operator/token
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.416804 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pljkk\" (UniqueName: \"kubernetes.io/projected/ef65fb95-ada6-43cb-8847-aefb174133bb-kube-api-access-pljkk\") pod \"openshift-controller-manager-operator-756b6f6bc6-xvjz5\" (UID: \"ef65fb95-ada6-43cb-8847-aefb174133bb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.421848 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.441755 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.447465 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.448449 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:57.948426581 +0000 UTC m=+103.532748911 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.462218 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.481840 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.501503 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.521557 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.549260 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.549885 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.049861533 +0000 UTC m=+103.634183863 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.579043 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.579796 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r95d\" (UniqueName: \"kubernetes.io/projected/564b439c-0ac6-43d5-afa8-2379ea73a71e-kube-api-access-2r95d\") pod \"collect-profiles-29415600-csfhl\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.596011 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbhmt\" (UniqueName: \"kubernetes.io/projected/0c96a75b-8c13-4da0-abcb-95855f1fbac5-kube-api-access-tbhmt\") pod \"marketplace-operator-79b997595-dmv5p\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.596181 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.617128 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9727afba-153c-4f79-9101-ccbd7e497ebe-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-n5vr8\" (UID: \"9727afba-153c-4f79-9101-ccbd7e497ebe\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.638252 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8v8f\" (UniqueName: \"kubernetes.io/projected/7c4e1e97-a3ae-414f-b12f-8b2463478934-kube-api-access-s8v8f\") pod \"machine-api-operator-5694c8668f-plvn8\" (UID: \"7c4e1e97-a3ae-414f-b12f-8b2463478934\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.651541 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.651774 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.151730176 +0000 UTC m=+103.736052516 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.652255 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.652769 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.152757362 +0000 UTC m=+103.737079682 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.664648 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtphk\" (UniqueName: \"kubernetes.io/projected/0206d524-6952-4adb-a5a6-2dc7fc8a60a5-kube-api-access-qtphk\") pod \"multus-admission-controller-857f4d67dd-sszzb\" (UID: \"0206d524-6952-4adb-a5a6-2dc7fc8a60a5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.677317 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlw4g\" (UniqueName: \"kubernetes.io/projected/e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5-kube-api-access-tlw4g\") pod \"olm-operator-6b444d44fb-sr9lg\" (UID: \"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.704604 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmm47\" (UniqueName: \"kubernetes.io/projected/39063f31-28f4-4464-b29e-2589d64907bc-kube-api-access-xmm47\") pod \"router-default-5444994796-2f9kw\" (UID: \"39063f31-28f4-4464-b29e-2589d64907bc\") " pod="openshift-ingress/router-default-5444994796-2f9kw"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.720260 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4nfp\" (UniqueName: \"kubernetes.io/projected/d454c199-9738-4862-a556-094087bde5b1-kube-api-access-c4nfp\") pod \"downloads-7954f5f757-lrpw8\" (UID: \"d454c199-9738-4862-a556-094087bde5b1\") " pod="openshift-console/downloads-7954f5f757-lrpw8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.739320 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ql8p\" (UniqueName: \"kubernetes.io/projected/b3dbd950-893e-4a2f-866f-8c538e7371e0-kube-api-access-5ql8p\") pod \"apiserver-76f77b778f-t5m9v\" (UID: \"b3dbd950-893e-4a2f-866f-8c538e7371e0\") " pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.754319 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.754514 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.254477711 +0000 UTC m=+103.838800041 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.754838 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.755313 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.255302601 +0000 UTC m=+103.839624931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.757110 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm5dw\" (UniqueName: \"kubernetes.io/projected/04563888-6e73-437a-99b3-9dfa5662ff33-kube-api-access-zm5dw\") pod \"controller-manager-879f6c89f-6lhz6\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.762329 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.777423 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.777769 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sblv\" (UniqueName: \"kubernetes.io/projected/51156375-440f-4e82-8fac-dea612cb45ed-kube-api-access-4sblv\") pod \"apiserver-7bbb656c7d-wkn79\" (UID: \"51156375-440f-4e82-8fac-dea612cb45ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.788278 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5"]
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.797030 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/44d0e064-79f2-4614-92ac-3a4d44cdbe92-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pqd9t\" (UID: \"44d0e064-79f2-4614-92ac-3a4d44cdbe92\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.803144 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.817640 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"]
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.824794 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7sf7\" (UniqueName: \"kubernetes.io/projected/a9d5f602-e7a7-4f57-9696-0b020b8a9e3f-kube-api-access-t7sf7\") pod \"kube-storage-version-migrator-operator-b67b599dd-fdd5c\" (UID: \"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c"
Dec 05 12:10:57 crc kubenswrapper[4711]: W1205 12:10:57.827733 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod564b439c_0ac6_43d5_afa8_2379ea73a71e.slice/crio-82feff36be0a451bc9c5811206eda114f8934bf951cce247c67f2c495a88cb50 WatchSource:0}: Error finding container 82feff36be0a451bc9c5811206eda114f8934bf951cce247c67f2c495a88cb50: Status 404 returned error can't find the container with id 82feff36be0a451bc9c5811206eda114f8934bf951cce247c67f2c495a88cb50
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.829897 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-lrpw8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.834261 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8vqv\" (UniqueName: \"kubernetes.io/projected/06f3328e-2709-4545-8e34-a253878a788a-kube-api-access-l8vqv\") pod \"machine-approver-56656f9798-jncdg\" (UID: \"06f3328e-2709-4545-8e34-a253878a788a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.838053 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.845237 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.854019 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2f9kw"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.855656 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.855796 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.35576368 +0000 UTC m=+103.940086010 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.856009 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.856442 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.356431766 +0000 UTC m=+103.940754096 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.861208 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.871557 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.873542 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stsln\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-kube-api-access-stsln\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.892120 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb7xw\" (UniqueName: \"kubernetes.io/projected/e5514b5e-1f9a-42b8-8db2-56b5cb965f97-kube-api-access-cb7xw\") pod \"dns-operator-744455d44c-5gn9w\" (UID: \"e5514b5e-1f9a-42b8-8db2-56b5cb965f97\") " pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.900938 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ltkp\" (UniqueName: \"kubernetes.io/projected/4dc833de-df68-4794-9093-5698c85833cc-kube-api-access-7ltkp\") pod \"oauth-openshift-558db77b4-k4w4c\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.912417 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.918184 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-bound-sa-token\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.928636 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.938703 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5r9n\" (UniqueName: \"kubernetes.io/projected/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-kube-api-access-d5r9n\") pod \"route-controller-manager-6576b87f9c-x86pf\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"
Dec 05 12:10:57 crc kubenswrapper[4711]: W1205 12:10:57.949107 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39063f31_28f4_4464_b29e_2589d64907bc.slice/crio-40a45f369a6318047f5a3ff2e0e537f784584037ee19e91f9ddea9750cbcdaa4 WatchSource:0}: Error finding container 40a45f369a6318047f5a3ff2e0e537f784584037ee19e91f9ddea9750cbcdaa4: Status 404 returned error can't find the container with id 40a45f369a6318047f5a3ff2e0e537f784584037ee19e91f9ddea9750cbcdaa4
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.957619 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:57 crc kubenswrapper[4711]: E1205 12:10:57.958363 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.458343031 +0000 UTC m=+104.042665361 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.960722 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.976196 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75tn8\" (UniqueName: \"kubernetes.io/projected/540b9374-4012-4532-9d68-ef686b30ba78-kube-api-access-75tn8\") pod \"dns-default-cxbnt\" (UID: \"540b9374-4012-4532-9d68-ef686b30ba78\") " pod="openshift-dns/dns-default-cxbnt"
Dec 05 12:10:57 crc kubenswrapper[4711]: I1205 12:10:57.979720 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj8df\" (UniqueName: \"kubernetes.io/projected/b31df7a1-216b-4579-ab74-96986ec8cb1d-kube-api-access-fj8df\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.003783 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.007946 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.030714 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pvr2\" (UniqueName: \"kubernetes.io/projected/7e280918-a8ee-40bf-84b3-6c2ee464003f-kube-api-access-8pvr2\") pod \"etcd-operator-b45778765-wz6zl\" (UID: \"7e280918-a8ee-40bf-84b3-6c2ee464003f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.036556 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-plvn8"]
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.036882 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.044667 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.046223 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtsdh\" (UniqueName: \"kubernetes.io/projected/1415693c-1ea2-42b6-8e90-5bebbb33db90-kube-api-access-gtsdh\") pod \"service-ca-9c57cc56f-z9sq9\" (UID: \"1415693c-1ea2-42b6-8e90-5bebbb33db90\") " pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.050844 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t5m9v"]
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.059355 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.059802 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.559783793 +0000 UTC m=+104.144106123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.063735 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9znl\" (UniqueName: \"kubernetes.io/projected/355f45bc-83fe-49ab-844e-f91a22b8f6fb-kube-api-access-w9znl\") pod \"packageserver-d55dfcdfc-rfdvq\" (UID: \"355f45bc-83fe-49ab-844e-f91a22b8f6fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.079870 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.086617 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.102949 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hcl2z\" (UID: \"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z"
Dec 05 12:10:58 crc kubenswrapper[4711]: W1205 12:10:58.105132 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06f3328e_2709_4545_8e34_a253878a788a.slice/crio-8467e17bdae4cfbee34afd35bcf1b5d3a33d8c9337948456138bef32c667eb68 WatchSource:0}: Error finding container 8467e17bdae4cfbee34afd35bcf1b5d3a33d8c9337948456138bef32c667eb68: Status 404 returned error can't find the container with id 8467e17bdae4cfbee34afd35bcf1b5d3a33d8c9337948456138bef32c667eb68
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.113107 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79"]
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.115142 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s9xv\" (UniqueName: \"kubernetes.io/projected/50f42111-4c58-408b-b2be-f739d494ef28-kube-api-access-6s9xv\") pod \"control-plane-machine-set-operator-78cbb6b69f-xvdpr\" (UID: \"50f42111-4c58-408b-b2be-f739d494ef28\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.127553 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-cxbnt"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.135048 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66h5k\" (UniqueName: \"kubernetes.io/projected/e4b27d33-ae80-497b-adc2-625f11662e4d-kube-api-access-66h5k\") pod \"openshift-apiserver-operator-796bbdcf4f-q96td\" (UID: \"e4b27d33-ae80-497b-adc2-625f11662e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.156117 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbrvn\" (UniqueName: \"kubernetes.io/projected/a4b80150-d70c-4bea-a687-46b452b82f6d-kube-api-access-rbrvn\") pod \"machine-config-controller-84d6567774-cxjlv\" (UID: \"a4b80150-d70c-4bea-a687-46b452b82f6d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.160831 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.161597 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.661575155 +0000 UTC m=+104.245897485 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.185407 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh9mq\" (UniqueName: \"kubernetes.io/projected/13866508-4ae9-427a-a2c8-5444fe01b9d2-kube-api-access-wh9mq\") pod \"authentication-operator-69f744f599-xn94x\" (UID: \"13866508-4ae9-427a-a2c8-5444fe01b9d2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.187775 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.189065 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4z29\" (UniqueName: \"kubernetes.io/projected/90c8c074-7442-4155-a948-817b93fec221-kube-api-access-c4z29\") pod \"ingress-canary-sg7pj\" (UID: \"90c8c074-7442-4155-a948-817b93fec221\") " pod="openshift-ingress-canary/ingress-canary-sg7pj"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.207315 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8b6z\" (UniqueName: \"kubernetes.io/projected/450b40c8-78a2-4ac7-97ba-56b2b165c0eb-kube-api-access-c8b6z\") pod \"cluster-samples-operator-665b6dd947-55845\" (UID: \"450b40c8-78a2-4ac7-97ba-56b2b165c0eb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.208574 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8"]
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.218844 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td"
Dec 05 12:10:58 crc kubenswrapper[4711]: W1205 12:10:58.227775 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44d0e064_79f2_4614_92ac_3a4d44cdbe92.slice/crio-3b170dab8431f1f45a87c00c9123f867f24d362633309b4302c3838a6a091dca WatchSource:0}: Error finding container 3b170dab8431f1f45a87c00c9123f867f24d362633309b4302c3838a6a091dca: Status 404 returned error can't find the container with id 3b170dab8431f1f45a87c00c9123f867f24d362633309b4302c3838a6a091dca
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.232253 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqqwc\" (UniqueName: \"kubernetes.io/projected/2f11fc52-29a6-4b09-8768-2e9b1d9a4a09-kube-api-access-gqqwc\") pod \"machine-config-operator-74547568cd-rkds8\" (UID: \"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.237879 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.274542 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.276940 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.277479 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.277955 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv"
Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.280082 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.780054526 +0000 UTC m=+104.364376856 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.306216 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssxrd\" (UniqueName: \"kubernetes.io/projected/56f640a1-033c-4c5f-b8f7-70bad15eaed3-kube-api-access-ssxrd\") pod \"openshift-config-operator-7777fb866f-p4qj4\" (UID: \"56f640a1-033c-4c5f-b8f7-70bad15eaed3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.306925 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b31df7a1-216b-4579-ab74-96986ec8cb1d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lv82p\" (UID: \"b31df7a1-216b-4579-ab74-96986ec8cb1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.309639 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.311091 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7ch5\" (UniqueName: \"kubernetes.io/projected/c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4-kube-api-access-k7ch5\") pod \"ingress-operator-5b745b69d9-zxjsn\" (UID: \"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.324486 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.325958 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg"]
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.335572 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" event={"ID":"9727afba-153c-4f79-9101-ccbd7e497ebe","Type":"ContainerStarted","Data":"39fe1a1b69208533573bd0e96e84704fb3ef97b9666e7b3b2cbe26ff39cd6be9"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.337665 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.339422 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" event={"ID":"51156375-440f-4e82-8fac-dea612cb45ed","Type":"ContainerStarted","Data":"45f96d084f120cc51a61ac6998a2b413bd60e0a625fabe0608303698167d1c68"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.356015 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.356735 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p4cm\" (UniqueName: \"kubernetes.io/projected/0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd-kube-api-access-6p4cm\") pod \"machine-config-server-nr7l4\" (UID: \"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd\") " pod="openshift-machine-config-operator/machine-config-server-nr7l4"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.359652 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" event={"ID":"7c4e1e97-a3ae-414f-b12f-8b2463478934","Type":"ContainerStarted","Data":"25b0746b233752d7c5ba5bf6107b093863b0be99179b55856eee8a5b83c1ae58"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.360478 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnrsf\" (UniqueName: \"kubernetes.io/projected/df146046-34ea-410b-a342-83bc374306d1-kube-api-access-mnrsf\") pod \"csi-hostpathplugin-rb2nh\" (UID: \"df146046-34ea-410b-a342-83bc374306d1\") " pod="hostpath-provisioner/csi-hostpathplugin-rb2nh"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.360760 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" event={"ID":"44d0e064-79f2-4614-92ac-3a4d44cdbe92","Type":"ContainerStarted","Data":"3b170dab8431f1f45a87c00c9123f867f24d362633309b4302c3838a6a091dca"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.363536 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-289ml\" (UniqueName: \"kubernetes.io/projected/3845ed2f-d607-4ba6-9d74-5293cfa722fd-kube-api-access-289ml\") pod \"service-ca-operator-777779d784-rn6fh\" (UID: \"3845ed2f-d607-4ba6-9d74-5293cfa722fd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.364048 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2f9kw" event={"ID":"39063f31-28f4-4464-b29e-2589d64907bc","Type":"ContainerStarted","Data":"40a45f369a6318047f5a3ff2e0e537f784584037ee19e91f9ddea9750cbcdaa4"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.366005 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" event={"ID":"b3dbd950-893e-4a2f-866f-8c538e7371e0","Type":"ContainerStarted","Data":"e167b8486a217f5613e23da658504debdcc560b4245385cd005af21e2706dd06"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.371515 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a64a971-347f-47da-8e47-1443b9ffebcc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wzcpg\" (UID: \"1a64a971-347f-47da-8e47-1443b9ffebcc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.371674 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c"]
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.377831 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.381666 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.881642282 +0000 UTC m=+104.465964602 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.382813 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.382832 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" event={"ID":"ef65fb95-ada6-43cb-8847-aefb174133bb","Type":"ContainerStarted","Data":"2886c5b8f24c26d68dacb8fe23b9fbe991cb3188a0041f873e28e871ab50ae07"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.382873 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" event={"ID":"ef65fb95-ada6-43cb-8847-aefb174133bb","Type":"ContainerStarted","Data":"ed388b782d34a5dcdeee04a8afec18c1fbab2eeef45328b8dbd84fcbb0644ccb"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.385848 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" event={"ID":"564b439c-0ac6-43d5-afa8-2379ea73a71e","Type":"ContainerStarted","Data":"e8a761d8a23b75eb445fcc0ab19366eb184e0a7bb2921912e2034b73842f6c7f"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.385931 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" event={"ID":"564b439c-0ac6-43d5-afa8-2379ea73a71e","Type":"ContainerStarted","Data":"82feff36be0a451bc9c5811206eda114f8934bf951cce247c67f2c495a88cb50"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.388252 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" event={"ID":"06f3328e-2709-4545-8e34-a253878a788a","Type":"ContainerStarted","Data":"8467e17bdae4cfbee34afd35bcf1b5d3a33d8c9337948456138bef32c667eb68"}
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.392339 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tkzn\" (UniqueName: \"kubernetes.io/projected/e2342447-c2fa-4e43-8a78-acde9e70782a-kube-api-access-9tkzn\") pod \"migrator-59844c95c7-z4kxc\" (UID: \"e2342447-c2fa-4e43-8a78-acde9e70782a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.403193 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjzxl\" (UniqueName: \"kubernetes.io/projected/ebcee70b-d418-4bad-b7df-0c46f8cd7422-kube-api-access-fjzxl\") pod \"console-operator-58897d9998-4stqf\" (UID: \"ebcee70b-d418-4bad-b7df-0c46f8cd7422\") " pod="openshift-console-operator/console-operator-58897d9998-4stqf"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.406303 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-lrpw8"]
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.416833 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-sg7pj"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.419370 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg7dr\" (UniqueName: \"kubernetes.io/projected/39bf8b25-13ec-4cf0-ba60-6159836e8f09-kube-api-access-mg7dr\") pod \"package-server-manager-789f6589d5-q4s7j\" (UID: \"39bf8b25-13ec-4cf0-ba60-6159836e8f09\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.419515 4711 request.go:700] Waited for 1.068477196s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/serviceaccounts/console/token
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.434957 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-nr7l4"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.453693 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mspc\" (UniqueName: \"kubernetes.io/projected/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-kube-api-access-7mspc\") pod \"console-f9d7485db-sr8f2\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " pod="openshift-console/console-f9d7485db-sr8f2"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.453986 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.464243 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtt5z\" (UniqueName: \"kubernetes.io/projected/08003b99-9e38-4d7d-a60f-2397dda4f9a6-kube-api-access-mtt5z\") pod \"catalog-operator-68c6474976-dhtx2\" (UID: \"08003b99-9e38-4d7d-a60f-2397dda4f9a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.483779 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.485326 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:58.98531248 +0000 UTC m=+104.569634810 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.566510 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.581938 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.585907 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.586121 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.086062456 +0000 UTC m=+104.670384786 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.586271 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.586987 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.086939007 +0000 UTC m=+104.671261337 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.588864 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.600491 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.627809 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.665418 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-4stqf"
Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.687379 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.687904 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.187882798 +0000 UTC m=+104.772205138 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.693978 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.710983 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.764744 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sszzb"] Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.774543 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dmv5p"] Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.788829 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.789166 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.289153056 +0000 UTC m=+104.873475376 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:58 crc kubenswrapper[4711]: W1205 12:10:58.889793 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0206d524_6952_4adb_a5a6_2dc7fc8a60a5.slice/crio-2c59dc31ce6368889e37b580e391700a4cc7bcfa07c201eedfc9bbd180988a02 WatchSource:0}: Error finding container 2c59dc31ce6368889e37b580e391700a4cc7bcfa07c201eedfc9bbd180988a02: Status 404 returned error can't find the container with id 2c59dc31ce6368889e37b580e391700a4cc7bcfa07c201eedfc9bbd180988a02 Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.894842 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.895331 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.395297485 +0000 UTC m=+104.979619815 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.993532 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6lhz6"] Dec 05 12:10:58 crc kubenswrapper[4711]: I1205 12:10:58.997704 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:58 crc kubenswrapper[4711]: E1205 12:10:58.998585 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.498567073 +0000 UTC m=+105.082889403 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.035765 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wz6zl"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.064267 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.103841 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.104454 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.604429064 +0000 UTC m=+105.188751394 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.194465 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-cxbnt"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.205435 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.205962 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.705945918 +0000 UTC m=+105.290268238 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.255996 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z9sq9"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.306478 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.306688 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.806659323 +0000 UTC m=+105.390981643 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.306785 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.307159 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.807146745 +0000 UTC m=+105.391469075 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: W1205 12:10:59.356877 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1415693c_1ea2_42b6_8e90_5bebbb33db90.slice/crio-9d088b06fa95976bc40fa5806d454dd9210b814b18e3a4e96ee6bf2f21460c12 WatchSource:0}: Error finding container 9d088b06fa95976bc40fa5806d454dd9210b814b18e3a4e96ee6bf2f21460c12: Status 404 returned error can't find the container with id 9d088b06fa95976bc40fa5806d454dd9210b814b18e3a4e96ee6bf2f21460c12 Dec 05 12:10:59 crc kubenswrapper[4711]: W1205 12:10:59.362843 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod540b9374_4012_4532_9d68_ef686b30ba78.slice/crio-a094e89e7ec422a2c5687ab2cadf383db7c3ccb4416bc649e8e985453fd4dc4a WatchSource:0}: Error finding container a094e89e7ec422a2c5687ab2cadf383db7c3ccb4416bc649e8e985453fd4dc4a: Status 404 returned error can't find the container with id a094e89e7ec422a2c5687ab2cadf383db7c3ccb4416bc649e8e985453fd4dc4a Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.412809 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.413949 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5gn9w"] Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.416163 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.916125573 +0000 UTC m=+105.500447903 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.417048 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.417605 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:10:59.917586109 +0000 UTC m=+105.501908439 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.455524 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-k4w4c"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.509369 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xn94x"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.512644 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" event={"ID":"44d0e064-79f2-4614-92ac-3a4d44cdbe92","Type":"ContainerStarted","Data":"a5213b2815705626701d64dd3df721d682a6264ea766c24ee386f464d1ab5722"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.522522 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.523436 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.02340817 +0000 UTC m=+105.607730500 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.523653 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" event={"ID":"04563888-6e73-437a-99b3-9dfa5662ff33","Type":"ContainerStarted","Data":"86f371e247e355176a0ffc615120baf258676ec4968063d6a2d90c08631d3425"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.532659 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.535611 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" event={"ID":"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4","Type":"ContainerStarted","Data":"f1e24b2a9949afc5cf12636d27c0caa1f12402bc14fda6ca23bb48fc320da27d"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.540556 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.547805 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.558059 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z"] Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.568264 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2f9kw" event={"ID":"39063f31-28f4-4464-b29e-2589d64907bc","Type":"ContainerStarted","Data":"35b971f6e2b46146cf9c21608ab4a9c255209747dda0dd211cf7b9c258b8912d"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.575237 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" event={"ID":"06f3328e-2709-4545-8e34-a253878a788a","Type":"ContainerStarted","Data":"09060ee11d2d09dd44dd92913616023046e9f0d51aff87e59cd9dcb8ed3fc197"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.578152 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" event={"ID":"9727afba-153c-4f79-9101-ccbd7e497ebe","Type":"ContainerStarted","Data":"b886ca34ead1a9128cd967e0b455ee937e0fcf4326c9c7d1426f8744ac4c2001"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.582769 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" event={"ID":"0c96a75b-8c13-4da0-abcb-95855f1fbac5","Type":"ContainerStarted","Data":"809581b1d783d89cc69e48efe174c3d00813dc5b80e33dfb39c6286154b0bb47"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.585284 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-lrpw8" 
event={"ID":"d454c199-9738-4862-a556-094087bde5b1","Type":"ContainerStarted","Data":"858e8d59e4267763b0cfcf16cc3b53e2b5eaaa36cd219ac53cdc71d1cfd78b81"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.591744 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb" event={"ID":"0206d524-6952-4adb-a5a6-2dc7fc8a60a5","Type":"ContainerStarted","Data":"2c59dc31ce6368889e37b580e391700a4cc7bcfa07c201eedfc9bbd180988a02"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.597551 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" event={"ID":"7e280918-a8ee-40bf-84b3-6c2ee464003f","Type":"ContainerStarted","Data":"7d44fe20255cf7ec47f0e89aaef5fd31219e35fc18658e9a9e13a5de17d43557"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.599758 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" event={"ID":"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5","Type":"ContainerStarted","Data":"a28fcf8bbb9110eed47d614f1905bb2078cef15c58cc3d478fc6a25b3939cdbc"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.613090 4711 generic.go:334] "Generic (PLEG): container finished" podID="51156375-440f-4e82-8fac-dea612cb45ed" containerID="c266f128fd1e6b84249ffa887561313527ea72e7fc94aea48e0684bbeec346fe" exitCode=0 Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.613587 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" event={"ID":"51156375-440f-4e82-8fac-dea612cb45ed","Type":"ContainerDied","Data":"c266f128fd1e6b84249ffa887561313527ea72e7fc94aea48e0684bbeec346fe"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.625701 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.627205 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.12718237 +0000 UTC m=+105.711504900 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.635257 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" event={"ID":"7c4e1e97-a3ae-414f-b12f-8b2463478934","Type":"ContainerStarted","Data":"3eb5b93c09ad24e3d5f1f2fb2d72e1d20d6dcd50cd253e33afe9cda37ccb4a3b"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.649241 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-nr7l4" event={"ID":"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd","Type":"ContainerStarted","Data":"ffde8c1f138fd40fd40bbb70d6466851440b08d54a6e927752c1d032e3882bc2"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.653661 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" event={"ID":"1415693c-1ea2-42b6-8e90-5bebbb33db90","Type":"ContainerStarted","Data":"9d088b06fa95976bc40fa5806d454dd9210b814b18e3a4e96ee6bf2f21460c12"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.664559 4711 generic.go:334] "Generic (PLEG): container finished" podID="b3dbd950-893e-4a2f-866f-8c538e7371e0" containerID="aeaf7202330a50af5e8a83b6ed9ec7ed7531a2f4e4d32ef9ab6c8f283cbe57fa" exitCode=0 Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.665550 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" event={"ID":"b3dbd950-893e-4a2f-866f-8c538e7371e0","Type":"ContainerDied","Data":"aeaf7202330a50af5e8a83b6ed9ec7ed7531a2f4e4d32ef9ab6c8f283cbe57fa"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.670884 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" event={"ID":"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f","Type":"ContainerStarted","Data":"d61d1f88952de99da8887f577b2e87086ddc561fad54d340ddbf3bda8eeb5d04"} Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.729130 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.734156 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.234125877 +0000 UTC m=+105.818448207 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.832333 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.832972 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.332932906 +0000 UTC m=+105.917255236 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.859350 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.875979 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:10:59 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:10:59 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:10:59 crc kubenswrapper[4711]: healthz check failed Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.876055 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:10:59 crc kubenswrapper[4711]: W1205 12:10:59.914434 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4dc833de_df68_4794_9093_5698c85833cc.slice/crio-1b4635786244777d18cb8ac8017bc6b77538ad68022c26d86a3d7de642f287ef WatchSource:0}: Error finding container 1b4635786244777d18cb8ac8017bc6b77538ad68022c26d86a3d7de642f287ef: Status 404 returned error can't find the container with id 1b4635786244777d18cb8ac8017bc6b77538ad68022c26d86a3d7de642f287ef Dec 05 12:10:59 crc kubenswrapper[4711]: I1205 12:10:59.936457 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:10:59 crc kubenswrapper[4711]: E1205 12:10:59.936961 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.436935571 +0000 UTC m=+106.021257901 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.013620 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.039863 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.040540 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.540522477 +0000 UTC m=+106.124844807 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.044471 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.049773 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.134908 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-2f9kw" podStartSLOduration=78.134888946 podStartE2EDuration="1m18.134888946s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:00.133289096 +0000 UTC m=+105.717611426" watchObservedRunningTime="2025-12-05 12:11:00.134888946 +0000 UTC m=+105.719211306" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.142012 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.142537 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.642518203 +0000 UTC m=+106.226840533 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.154283 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod564b439c_0ac6_43d5_afa8_2379ea73a71e.slice/crio-e8a761d8a23b75eb445fcc0ab19366eb184e0a7bb2921912e2034b73842f6c7f.scope\": RecentStats: unable to find data in memory cache]" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.196172 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n5vr8" podStartSLOduration=78.196152211 podStartE2EDuration="1m18.196152211s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:00.19447126 +0000 UTC m=+105.778793610" watchObservedRunningTime="2025-12-05 12:11:00.196152211 +0000 UTC m=+105.780474541" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.244482 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.244862 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.744848408 +0000 UTC m=+106.329170738 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.329868 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqd9t" podStartSLOduration=79.329843027 podStartE2EDuration="1m19.329843027s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:00.276298371 +0000 UTC m=+105.860620701" watchObservedRunningTime="2025-12-05 12:11:00.329843027 +0000 UTC m=+105.914165357" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.345862 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.346233 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.846217858 +0000 UTC m=+106.430540178 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.391850 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xvjz5" podStartSLOduration=78.391823629 podStartE2EDuration="1m18.391823629s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:00.333788934 +0000 UTC m=+105.918111274" watchObservedRunningTime="2025-12-05 12:11:00.391823629 +0000 UTC m=+105.976145959" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.449076 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.449567 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:00.949552168 +0000 UTC m=+106.533874498 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.452059 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" podStartSLOduration=79.452046619 podStartE2EDuration="1m19.452046619s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:00.398636227 +0000 UTC m=+105.982958557" watchObservedRunningTime="2025-12-05 12:11:00.452046619 +0000 UTC m=+106.036368949" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.461993 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-sg7pj"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.467838 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-sr8f2"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.550810 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.551339 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.051318809 +0000 UTC m=+106.635641139 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: W1205 12:11:00.600030 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f652b9a_5eb6_4066_84f7_dc1a7e09f038.slice/crio-ecb53aa652ff3c5b20bece632564f5a94d77f136177b68122942732272fce01a WatchSource:0}: Error finding container ecb53aa652ff3c5b20bece632564f5a94d77f136177b68122942732272fce01a: Status 404 returned error can't find the container with id ecb53aa652ff3c5b20bece632564f5a94d77f136177b68122942732272fce01a Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.654271 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.664659 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rb2nh"] Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.664733 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.164715485 +0000 UTC m=+106.749037815 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.673365 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.676612 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.680652 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4stqf"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.721802 4711 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-dmv5p container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.721858 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.728071 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.728129 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.728154 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" event={"ID":"0c96a75b-8c13-4da0-abcb-95855f1fbac5","Type":"ContainerStarted","Data":"15f2f472ca86dae38f122ec3bc11940efed779327aead770c8b5ebae6661f214"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.733674 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn"] Dec 05 12:11:00 crc kubenswrapper[4711]: W1205 12:11:00.754668 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f11fc52_29a6_4b09_8768_2e9b1d9a4a09.slice/crio-4fb942eaefd8a2e7c518126327478c66cba30d9d317eb752c106ed8ca39937af WatchSource:0}: Error finding container 4fb942eaefd8a2e7c518126327478c66cba30d9d317eb752c106ed8ca39937af: Status 404 returned error can't find the container with id 4fb942eaefd8a2e7c518126327478c66cba30d9d317eb752c106ed8ca39937af Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.755080 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" 
event={"ID":"e5514b5e-1f9a-42b8-8db2-56b5cb965f97","Type":"ContainerStarted","Data":"1328fe87130c92f714e1f7405d28c1de624e8f1b2b084845dcd06c297a9822b7"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.755960 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.756166 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.256133311 +0000 UTC m=+106.840455631 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.756339 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.756471 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.756869 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.25685552 +0000 UTC m=+106.841177840 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: W1205 12:11:00.759227 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf146046_34ea_410b_a342_83bc374306d1.slice/crio-7671cdf15569d067cae7a8460f2675dbb3984409afadb2974763c0a94a5f1724 WatchSource:0}: Error finding container 7671cdf15569d067cae7a8460f2675dbb3984409afadb2974763c0a94a5f1724: Status 404 returned error can't find the container with id 7671cdf15569d067cae7a8460f2675dbb3984409afadb2974763c0a94a5f1724 Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.770506 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5afbc1-d1a4-40c8-990d-72a8169d5072-metrics-certs\") pod \"network-metrics-daemon-hv9gb\" (UID: \"fa5afbc1-d1a4-40c8-990d-72a8169d5072\") " pod="openshift-multus/network-metrics-daemon-hv9gb" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.773872 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.779576 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.785924 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" event={"ID":"355f45bc-83fe-49ab-844e-f91a22b8f6fb","Type":"ContainerStarted","Data":"aca4177607b1e48a387d25ec5a6ba61fc00d372cd82495be25cf663a03b30144"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.792793 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" event={"ID":"13866508-4ae9-427a-a2c8-5444fe01b9d2","Type":"ContainerStarted","Data":"6f472e7246f6729d45dadee4868f2f143f30302f5875f52948cab3d3fdc00fdb"} Dec 05 12:11:00 crc kubenswrapper[4711]: W1205 12:11:00.801439 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8ec9977_60ff_48aa_a0d4_371b5ffbb2a4.slice/crio-9c8e3abe974fd8f06330425a913c7d5987b68cc33a4632265e89caef34e6eee3 WatchSource:0}: Error finding container 9c8e3abe974fd8f06330425a913c7d5987b68cc33a4632265e89caef34e6eee3: Status 404 returned error can't find the container with id 9c8e3abe974fd8f06330425a913c7d5987b68cc33a4632265e89caef34e6eee3 Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.803425 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.805642 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc"] Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.825143 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" event={"ID":"06f3328e-2709-4545-8e34-a253878a788a","Type":"ContainerStarted","Data":"3a30d995142f9dce2adba70dbce781d58af5e8be5fb8c509aaddfb7c44bc0a25"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.827966 4711 generic.go:334] "Generic (PLEG): container finished" podID="564b439c-0ac6-43d5-afa8-2379ea73a71e" containerID="e8a761d8a23b75eb445fcc0ab19366eb184e0a7bb2921912e2034b73842f6c7f" exitCode=0 Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.828052 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" event={"ID":"564b439c-0ac6-43d5-afa8-2379ea73a71e","Type":"ContainerDied","Data":"e8a761d8a23b75eb445fcc0ab19366eb184e0a7bb2921912e2034b73842f6c7f"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.840505 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" event={"ID":"4dc833de-df68-4794-9093-5698c85833cc","Type":"ContainerStarted","Data":"1b4635786244777d18cb8ac8017bc6b77538ad68022c26d86a3d7de642f287ef"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.856273 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-sg7pj" event={"ID":"90c8c074-7442-4155-a948-817b93fec221","Type":"ContainerStarted","Data":"45d280debed7f5aec9d48d4acb098017b854dfec3d96eb407407e5db32da29d6"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.857375 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.861172 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.361145372 +0000 UTC m=+106.945467702 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.863970 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" event={"ID":"7c4e1e97-a3ae-414f-b12f-8b2463478934","Type":"ContainerStarted","Data":"a8d8eda9d9e2a8bf012d14e3b5305c1a0ab1d53e9de2dbd9660be9c265d75dbf"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.874326 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:00 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:00 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:00 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.874460 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.880426 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" podStartSLOduration=78.88040019499999 podStartE2EDuration="1m18.880400195s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:00.879265707 +0000 UTC m=+106.463588037" watchObservedRunningTime="2025-12-05 12:11:00.880400195 +0000 UTC m=+106.464722525" Dec 05 12:11:00 crc kubenswrapper[4711]: W1205 12:11:00.881166 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebcee70b_d418_4bad_b7df_0c46f8cd7422.slice/crio-bdb273213ded4f1772632264b529ae50150c72a9e1c13c896d287acf75646817 WatchSource:0}: Error finding container bdb273213ded4f1772632264b529ae50150c72a9e1c13c896d287acf75646817: Status 404 returned error can't find the container with id bdb273213ded4f1772632264b529ae50150c72a9e1c13c896d287acf75646817 Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.882080 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" event={"ID":"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4","Type":"ContainerStarted","Data":"33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2"} Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.883422 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.891114 4711 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-x86pf container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe 
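Each failed volume operation is parked by nestedpendingoperations with a "No retries permitted until" deadline: the operation key stays pending, and the volume reconciler, which re-evaluates desired versus actual state on a short interval, may only re-launch it after the deadline passes (durationBeforeRetry is 500ms here; in upstream kubelet the delay grows exponentially on repeated failures of the same operation, capped on the order of two minutes). A rough sketch of that bookkeeping, with simplified, illustrative types:

    package main

    import (
        "fmt"
        "time"
    )

    // pendingOp loosely mirrors what nestedpendingoperations tracks per
    // volume/pod key: the last error and the earliest permitted retry time.
    type pendingOp struct {
        lastErr    error
        retryAfter time.Time
        backoff    time.Duration
    }

    const (
        initialBackoff = 500 * time.Millisecond // matches durationBeforeRetry in the log
        maxBackoff     = 2 * time.Minute        // assumed cap, roughly upstream's
    )

    func (op *pendingOp) fail(now time.Time, err error) {
        if op.backoff == 0 {
            op.backoff = initialBackoff
        } else if op.backoff < maxBackoff {
            op.backoff *= 2 // exponential growth on repeated failure
        }
        op.lastErr = err
        op.retryAfter = now.Add(op.backoff)
    }

    func (op *pendingOp) mayRetry(now time.Time) bool { return !now.Before(op.retryAfter) }

    func main() {
        op := &pendingOp{}
        now := time.Now()
        op.fail(now, fmt.Errorf("driver not registered"))
        fmt.Println("no retries permitted until", op.retryAfter.Format(time.RFC3339Nano))
        fmt.Println("retry allowed immediately?", op.mayRetry(now)) // false
    }

This is why the same TearDown/MountDevice pair reappears at roughly half-second intervals for the rest of this stretch of the log.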
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.891187 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" podUID="ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.896767 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hv9gb"
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.899728 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-lrpw8" event={"ID":"d454c199-9738-4862-a556-094087bde5b1","Type":"ContainerStarted","Data":"cc55e26d495f6fa13b512b095f18c93a0fa3a3cc89603fcc7f737d3f51d57b73"}
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.900475 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-lrpw8"
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.924645 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-lrpw8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body=
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.924967 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lrpw8" podUID="d454c199-9738-4862-a556-094087bde5b1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused"
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.925971 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr" event={"ID":"50f42111-4c58-408b-b2be-f739d494ef28","Type":"ContainerStarted","Data":"6ac911eba5f4aeb920460a738e375aa702b1a8870e462010b82f5a2d3662b400"}
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.939921 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" event={"ID":"a9d5f602-e7a7-4f57-9696-0b020b8a9e3f","Type":"ContainerStarted","Data":"c043c18f655a752dc789faf03dfaf52eec5d7abc5e9f348d229898434598f5a1"}
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.960631 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" event={"ID":"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1","Type":"ContainerStarted","Data":"6d89148096b9bc9da81a9e7ed9f4a7c775635cc320a7b755ab3962900a35843a"}
Dec 05 12:11:00 crc kubenswrapper[4711]: W1205 12:11:00.962101 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08003b99_9e38_4d7d_a60f_2397dda4f9a6.slice/crio-d01d3a856742c0e9e208c61b7b8614013160230247b3d2b14f55ac37f9439477 WatchSource:0}: Error finding container d01d3a856742c0e9e208c61b7b8614013160230247b3d2b14f55ac37f9439477: Status 404 returned error can't find the container with id d01d3a856742c0e9e208c61b7b8614013160230247b3d2b14f55ac37f9439477
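Two distinct probe failure modes are interleaved here: the router's startup probe reaches its healthz endpoint but receives HTTP 500 (the multi-line body lists which sub-checks failed), while the route-controller-manager and downloads readiness probes cannot connect at all ("connection refused" means nothing is listening on the port yet). The kubelet's HTTP prober counts only 2xx/3xx responses as success. A self-contained sketch of that style of check, with illustrative names:

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probeHTTP performs one HTTP health check of the kind the entries
    // above describe: a refused connection and a 500 are both failures,
    // but they mean different things (not listening vs. unhealthy).
    func probeHTTP(url string) (success bool, detail string) {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return false, err.Error() // e.g. "dial tcp 10.217.0.5:8443: connect: connection refused"
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            return true, "ok"
        }
        return false, fmt.Sprintf("HTTP probe failed with statuscode: %d", resp.StatusCode)
    }

    func main() {
        ok, detail := probeHTTP("http://10.217.0.11:8080/") // the downloads pod's readiness URL from the log
        fmt.Println(ok, detail)
    }

The distinction matters when reading the rest of this section: "connection refused" clears on its own once the container binds its port, whereas a 500 from healthz persists until the named sub-checks (backend-http, has-synced) pass.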
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.963066 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:00 crc kubenswrapper[4711]: I1205 12:11:00.964103 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" podStartSLOduration=78.964080872 podStartE2EDuration="1m18.964080872s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:00.962628416 +0000 UTC m=+106.546950736" watchObservedRunningTime="2025-12-05 12:11:00.964080872 +0000 UTC m=+106.548403202"
Dec 05 12:11:00 crc kubenswrapper[4711]: E1205 12:11:00.965742 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.465727982 +0000 UTC m=+107.050050302 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.012785 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jncdg" podStartSLOduration=80.012762058 podStartE2EDuration="1m20.012762058s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:01.011010595 +0000 UTC m=+106.595332925" watchObservedRunningTime="2025-12-05 12:11:01.012762058 +0000 UTC m=+106.597084388"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.042221 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.045677 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-plvn8" podStartSLOduration=79.044415756 podStartE2EDuration="1m19.044415756s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:01.043848892 +0000 UTC m=+106.628171232" watchObservedRunningTime="2025-12-05 12:11:01.044415756 +0000 UTC m=+106.628738086"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.057193 4711 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-6lhz6 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body=
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.057252 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" podUID="04563888-6e73-437a-99b3-9dfa5662ff33" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.057468 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" event={"ID":"a4b80150-d70c-4bea-a687-46b452b82f6d","Type":"ContainerStarted","Data":"7dac5536ddb527fd81dadb3baa672b43e2a648389b94cea7de74d6fd7dd4715b"}
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.059399 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sr8f2" event={"ID":"7f652b9a-5eb6-4066-84f7-dc1a7e09f038","Type":"ContainerStarted","Data":"ecb53aa652ff3c5b20bece632564f5a94d77f136177b68122942732272fce01a"}
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.065100 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.066393 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.566372516 +0000 UTC m=+107.150694846 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.068187 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" event={"ID":"e4b27d33-ae80-497b-adc2-625f11662e4d","Type":"ContainerStarted","Data":"6bfe9a6e7382de67d2dd127b55a00b3e8fc8b23ece10d54bcf752de2da51bde7"}
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.075958 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cxbnt" event={"ID":"540b9374-4012-4532-9d68-ef686b30ba78","Type":"ContainerStarted","Data":"a094e89e7ec422a2c5687ab2cadf383db7c3ccb4416bc649e8e985453fd4dc4a"}
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.109459 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" event={"ID":"1a64a971-347f-47da-8e47-1443b9ffebcc","Type":"ContainerStarted","Data":"1cd3ccb33c87dcc4a557a55b9e0410c0cc1cf11fffece09ca9c28592bef8c688"}
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.123063 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-lrpw8" podStartSLOduration=79.123041448 podStartE2EDuration="1m19.123041448s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:01.083830064 +0000 UTC m=+106.668152394" watchObservedRunningTime="2025-12-05 12:11:01.123041448 +0000 UTC m=+106.707363778"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.135955 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fdd5c" podStartSLOduration=79.135932395 podStartE2EDuration="1m19.135932395s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:01.122795212 +0000 UTC m=+106.707117562" watchObservedRunningTime="2025-12-05 12:11:01.135932395 +0000 UTC m=+106.720254725"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.163784 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" event={"ID":"e92c758c-fe28-4c9e-8e73-fccd0ce4d7a5","Type":"ContainerStarted","Data":"77fff859af9589d3e0d958162a437c7767df95d2a9ee23252c41d76829170feb"}
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.163850 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.165268 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" podStartSLOduration=79.165237174 podStartE2EDuration="1m19.165237174s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:01.164825145 +0000 UTC m=+106.749147475" watchObservedRunningTime="2025-12-05 12:11:01.165237174 +0000 UTC m=+106.749559504"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.167394 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.167838 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.667806858 +0000 UTC m=+107.252129188 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.190712 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.215752 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" podStartSLOduration=79.215723116 podStartE2EDuration="1m19.215723116s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:01.208380675 +0000 UTC m=+106.792703005" watchObservedRunningTime="2025-12-05 12:11:01.215723116 +0000 UTC m=+106.800045446"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.271264 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.271780 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.771747293 +0000 UTC m=+107.356069623 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.273300 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sr9lg" podStartSLOduration=79.27328394 podStartE2EDuration="1m19.27328394s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:01.270862831 +0000 UTC m=+106.855185161" watchObservedRunningTime="2025-12-05 12:11:01.27328394 +0000 UTC m=+106.857606270"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.373388 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.376013 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.875992764 +0000 UTC m=+107.460315094 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.482975 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.483967 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:01.983943307 +0000 UTC m=+107.568265637 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.585205 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.585943 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.085926263 +0000 UTC m=+107.670248593 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.689101 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.689567 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.189550069 +0000 UTC m=+107.773872389 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.747815 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-hv9gb"]
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.791060 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.791621 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.291420193 +0000 UTC m=+107.875742523 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.860161 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 12:11:01 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld
Dec 05 12:11:01 crc kubenswrapper[4711]: [+]process-running ok
Dec 05 12:11:01 crc kubenswrapper[4711]: healthz check failed
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.860209 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.892893 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.907261 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.407231118 +0000 UTC m=+107.991553458 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:01 crc kubenswrapper[4711]: I1205 12:11:01.917816 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:01 crc kubenswrapper[4711]: E1205 12:11:01.918488 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.418467664 +0000 UTC m=+108.002789994 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.020818 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.021272 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.52125462 +0000 UTC m=+108.105576950 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.122175 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.122553 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.622541079 +0000 UTC m=+108.206863409 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.218921 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-sg7pj" event={"ID":"90c8c074-7442-4155-a948-817b93fec221","Type":"ContainerStarted","Data":"23b0d7dc60c44d110b84ca62db506b32a1f4e1709b04317e6e57544cc8a20aac"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.225020 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.225409 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.725391807 +0000 UTC m=+108.309714137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.248225 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-sg7pj" podStartSLOduration=7.248197547 podStartE2EDuration="7.248197547s" podCreationTimestamp="2025-12-05 12:10:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:02.247181192 +0000 UTC m=+107.831503522" watchObservedRunningTime="2025-12-05 12:11:02.248197547 +0000 UTC m=+107.832519877"
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.278700 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" event={"ID":"e5514b5e-1f9a-42b8-8db2-56b5cb965f97","Type":"ContainerStarted","Data":"8e0d7f5946f98ec83b93d9cf57200d8d6a367bb30c74a35ae2ad0bc55a991faf"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.297073 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" event={"ID":"b31df7a1-216b-4579-ab74-96986ec8cb1d","Type":"ContainerStarted","Data":"a5c2386cc61d3e48795da32a290144b320f8a956bd15059efae62e752ec5cc4e"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.297503 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" event={"ID":"b31df7a1-216b-4579-ab74-96986ec8cb1d","Type":"ContainerStarted","Data":"b075378bf04872f26e2d9df39ab3ffede936e70a1e5ba70817afad948234eb11"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.315678 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-nr7l4" event={"ID":"0b2d3f2c-4c0c-4bcb-a2e2-eb57fac363dd","Type":"ContainerStarted","Data":"bab942b7bdc2de82c325960e8af064b2e5b9ab9892459e12354f270f2b7efd39"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.325623 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" event={"ID":"08003b99-9e38-4d7d-a60f-2397dda4f9a6","Type":"ContainerStarted","Data":"d01d3a856742c0e9e208c61b7b8614013160230247b3d2b14f55ac37f9439477"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.329373 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.329782 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.829768481 +0000 UTC m=+108.414090811 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.329788 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lv82p" podStartSLOduration=80.329777401 podStartE2EDuration="1m20.329777401s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:02.328913001 +0000 UTC m=+107.913235331" watchObservedRunningTime="2025-12-05 12:11:02.329777401 +0000 UTC m=+107.914099731"
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.341960 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" event={"ID":"1a64a971-347f-47da-8e47-1443b9ffebcc","Type":"ContainerStarted","Data":"9eb2a14888a34563f4678f6ceeb80268f4702849e325d0fa09f3ef6e4f1d7e72"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.358610 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" event={"ID":"04563888-6e73-437a-99b3-9dfa5662ff33","Type":"ContainerStarted","Data":"05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.360829 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-nr7l4" podStartSLOduration=7.360819474 podStartE2EDuration="7.360819474s" podCreationTimestamp="2025-12-05 12:10:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:02.352964342 +0000 UTC m=+107.937286672" watchObservedRunningTime="2025-12-05 12:11:02.360819474 +0000 UTC m=+107.945141804"
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.372690 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.393585 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" event={"ID":"a4b80150-d70c-4bea-a687-46b452b82f6d","Type":"ContainerStarted","Data":"1ad9b49410fe634e1ec396e5f29abfad728966104ccdd96eebd3c5e8c96837db"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.394489 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wzcpg" podStartSLOduration=80.394477531 podStartE2EDuration="1m20.394477531s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:02.394251886 +0000 UTC m=+107.978574226" watchObservedRunningTime="2025-12-05 12:11:02.394477531 +0000 UTC m=+107.978799861"
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.428321 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" event={"ID":"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09","Type":"ContainerStarted","Data":"958b1e116df1b3c77085626e055b00b519c5b1b1936006914ffc8a0b7602e6a1"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.428382 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" event={"ID":"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09","Type":"ContainerStarted","Data":"4fb942eaefd8a2e7c518126327478c66cba30d9d317eb752c106ed8ca39937af"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.430370 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.432635 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:02.932610079 +0000 UTC m=+108.516932409 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.455926 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" event={"ID":"13866508-4ae9-427a-a2c8-5444fe01b9d2","Type":"ContainerStarted","Data":"c41e5720dfcbb52f0cffcad69a981b7f7ffc8c166b1ff72157a944b90d24e7b2"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.480410 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb" event={"ID":"0206d524-6952-4adb-a5a6-2dc7fc8a60a5","Type":"ContainerStarted","Data":"73829c23a86ff4d52b85a2e534c0145f805efdd7c52b66087e966295ca30174c"}
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.508310 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-xn94x" podStartSLOduration=81.508288068 podStartE2EDuration="1m21.508288068s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:02.507812456 +0000 UTC m=+108.092134806" watchObservedRunningTime="2025-12-05 12:11:02.508288068 +0000 UTC m=+108.092610398"
Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.517148 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" event={"ID":"e4b27d33-ae80-497b-adc2-625f11662e4d","Type":"ContainerStarted","Data":"669872c00ef27299b48bdbcf676fd1acc239e670622f2bb2c85839a7b0dbf58d"}
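Most of the informational traffic in this stretch is the PLEG (pod lifecycle event generator) relisting containers through the CRI and emitting ContainerStarted/ContainerDied events that the sync loop consumes; the interleaved "Failed to process watch event ... Status 404" warnings are typically a benign startup race in which cAdvisor notices a new crio-<id> cgroup before the runtime can answer queries about that container id. In outline, consuming PLEG-style events looks like the following (types are illustrative, not the kubelet's):

    package main

    import "fmt"

    // podLifecycleEvent mirrors the shape printed in the log:
    // event={"ID":..., "Type":"ContainerStarted", "Data":<container id>}.
    type podLifecycleEvent struct {
        ID   string // pod UID
        Type string // "ContainerStarted", "ContainerDied", ...
        Data string // container or sandbox id
    }

    func handle(ev podLifecycleEvent) {
        switch ev.Type {
        case "ContainerStarted":
            fmt.Printf("pod %s: container %s started; trigger pod sync\n", ev.ID, ev.Data)
        case "ContainerDied":
            fmt.Printf("pod %s: container %s exited; record status, maybe restart\n", ev.ID, ev.Data)
        default:
            fmt.Printf("pod %s: unhandled event %s\n", ev.ID, ev.Type)
        }
    }

    func main() {
        // The collect-profiles job container that finished with exitCode=0 above.
        handle(podLifecycleEvent{
            ID:   "564b439c-0ac6-43d5-afa8-2379ea73a71e",
            Type: "ContainerDied",
            Data: "e8a761d8a23b75eb445fcc0ab19366eb184e0a7bb2921912e2034b73842f6c7f",
        })
    }

Note how the catalog-operator ContainerStarted event at 12:11:02.325623 carries the same id (d01d3a85...) that the 404 watch warning complained about a second earlier: the event simply arrived once the runtime caught up.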
event={"ID":"e4b27d33-ae80-497b-adc2-625f11662e4d","Type":"ContainerStarted","Data":"669872c00ef27299b48bdbcf676fd1acc239e670622f2bb2c85839a7b0dbf58d"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.535496 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.535888 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.035872056 +0000 UTC m=+108.620194386 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.571658 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" event={"ID":"df146046-34ea-410b-a342-83bc374306d1","Type":"ContainerStarted","Data":"7671cdf15569d067cae7a8460f2675dbb3984409afadb2974763c0a94a5f1724"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.609523 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" event={"ID":"39bf8b25-13ec-4cf0-ba60-6159836e8f09","Type":"ContainerStarted","Data":"6df41bfcf30a93dda5d84d0f9d2223db9799cec104cb9d9932d619caa12bf7b2"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.609598 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" event={"ID":"39bf8b25-13ec-4cf0-ba60-6159836e8f09","Type":"ContainerStarted","Data":"cef5c643cd69c7b8d802ad367568d288817da7ab8869ce403f029bb5d28391b7"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.654182 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.655931 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.155901575 +0000 UTC m=+108.740223905 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.663170 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" event={"ID":"56f640a1-033c-4c5f-b8f7-70bad15eaed3","Type":"ContainerStarted","Data":"d197d661d1cdbefaf0971e8c2854c0f2c490622869a32961869e808f46385f71"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.721505 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-z9sq9" event={"ID":"1415693c-1ea2-42b6-8e90-5bebbb33db90","Type":"ContainerStarted","Data":"3590ce742dd7cb6efb4a3ee7e997c67861414e7d7240a7e63cb4f41383c0ed08"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.757740 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.758198 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.258184889 +0000 UTC m=+108.842507219 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.761576 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" event={"ID":"b3dbd950-893e-4a2f-866f-8c538e7371e0","Type":"ContainerStarted","Data":"01695d3c2c82e7020357c273d5120c21f6280726ec6c9f92f2bd34a909c45b94"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.795522 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" event={"ID":"51156375-440f-4e82-8fac-dea612cb45ed","Type":"ContainerStarted","Data":"3cf4544cc961747c244a2ead2662d08f01769aba869aa3d4e516d335655f010f"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.807713 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.807994 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.844642 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-q96td" podStartSLOduration=81.844622883 podStartE2EDuration="1m21.844622883s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:02.580951594 +0000 UTC m=+108.165273924" watchObservedRunningTime="2025-12-05 12:11:02.844622883 +0000 UTC m=+108.428945213" Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.852323 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" podStartSLOduration=80.852296141 podStartE2EDuration="1m20.852296141s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:02.843927195 +0000 UTC m=+108.428249525" watchObservedRunningTime="2025-12-05 12:11:02.852296141 +0000 UTC m=+108.436618471" Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.860128 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.861487 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.361471387 +0000 UTC m=+108.945793717 (durationBeforeRetry 500ms). 
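The oauth-apiserver entries above and the "started" transition just below show the startup-probe handshake: while the startup probe reports unhealthy, the kubelet holds readiness and liveness probing back, and only after the probe first succeeds (status="started", at 12:11:02.892211) do the ordinary probes begin to count. A compact sketch of that gating rule, with illustrative names:

    package main

    import "fmt"

    // probeGate captures the rule visible in the log: readiness results
    // are ignored until the startup probe has succeeded once.
    type probeGate struct {
        started bool
    }

    func (g *probeGate) onStartupResult(ok bool) {
        if ok {
            g.started = true // "SyncLoop (probe)" probe="startup" status="started"
        }
    }

    func (g *probeGate) readinessCounts() bool { return g.started }

    func main() {
        g := &probeGate{}
        g.onStartupResult(false) // status="unhealthy": readiness still gated
        fmt.Println("readiness evaluated:", g.readinessCounts()) // false
        g.onStartupResult(true)
        fmt.Println("readiness evaluated:", g.readinessCounts()) // true
    }

This also explains why the csi-hostpathplugin containers starting at 12:11:02.571658 do not immediately clear the volume retry loop: the plugin still has to come up and register with the kubelet before the driver-name lookup succeeds.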
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.872885 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cxbnt" event={"ID":"540b9374-4012-4532-9d68-ef686b30ba78","Type":"ContainerStarted","Data":"5520e5d9ad1f4aa24bf1e9479965961374c4252978f06f04f8b1332234949db8"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.885646 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:02 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:02 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:02 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.885741 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.892211 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.934392 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr" event={"ID":"50f42111-4c58-408b-b2be-f739d494ef28","Type":"ContainerStarted","Data":"a336556e83bbc1a5ca143a18ef0b8dd21699e06bc0a18f76f29e803f7cf9d207"} Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.961910 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:02 crc kubenswrapper[4711]: E1205 12:11:02.962988 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.462966921 +0000 UTC m=+109.047289261 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:02 crc kubenswrapper[4711]: I1205 12:11:02.996249 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" event={"ID":"3845ed2f-d607-4ba6-9d74-5293cfa722fd","Type":"ContainerStarted","Data":"16efebb67fe7500ae051d80ff72d7fa2207ec5781409cdc1fe9ec188bcc68a8b"} Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.007088 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xvdpr" podStartSLOduration=81.007061475 podStartE2EDuration="1m21.007061475s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:02.999096099 +0000 UTC m=+108.583418449" watchObservedRunningTime="2025-12-05 12:11:03.007061475 +0000 UTC m=+108.591383795" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.067047 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.067722 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.567679684 +0000 UTC m=+109.152002044 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.096320 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" event={"ID":"7e280918-a8ee-40bf-84b3-6c2ee464003f","Type":"ContainerStarted","Data":"d813ad2644088f2d8c83120528a3f38e4d18de770ee272021d95706d91eeed8c"} Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.139827 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" event={"ID":"355f45bc-83fe-49ab-844e-f91a22b8f6fb","Type":"ContainerStarted","Data":"ea8f68d7b12caf61205296f52613f2980d8df42a1ac8763bfdbfab5f27d81273"} Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.141111 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.172665 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.173133 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.673116866 +0000 UTC m=+109.257439206 (durationBeforeRetry 500ms). 
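
The MountVolume.MountDevice and UnmountVolume.TearDown failures repeating through this stretch of the log share a single root cause: kubevirt.io.hostpath-provisioner is not yet present in the kubelet's registry of CSI plugins (its csi-hostpathplugin pod only starts further down), so every attempt to build a CSI client fails, and nestedpendingoperations refuses the next retry until 500ms after the last failure, which is what each "No retries permitted until ... (durationBeforeRetry 500ms)" entry records. A minimal Go sketch of that gating pattern, under the assumption of a plain registry map; driverRegistry, retryGate, and tryMount are illustrative names, not kubelet internals:

    package main

    import (
        "fmt"
        "time"
    )

    // driverRegistry stands in for the kubelet's table of registered CSI
    // plugins. kubevirt.io.hostpath-provisioner is absent until its node
    // plugin pod comes up and registers it.
    var driverRegistry = map[string]bool{}

    func newCsiDriverClient(driver string) error {
        if !driverRegistry[driver] {
            return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", driver)
        }
        return nil
    }

    // retryGate mimics the nestedpendingoperations behavior seen above: after
    // a failure, no retry is permitted until lastFailure + backoff (500ms).
    type retryGate struct {
        lastFailure time.Time
        backoff     time.Duration
    }

    func (g *retryGate) tryMount(driver string) {
        if time.Now().Before(g.lastFailure.Add(g.backoff)) {
            return // still backing off; the kubelet logs "No retries permitted until ..."
        }
        if err := newCsiDriverClient(driver); err != nil {
            g.lastFailure = time.Now()
            fmt.Println("MountVolume.MountDevice failed:", err)
        }
    }

    func main() {
        g := &retryGate{backoff: 500 * time.Millisecond}
        for i := 0; i < 3; i++ {
            g.tryMount("kubevirt.io.hostpath-provisioner")
            time.Sleep(600 * time.Millisecond)
        }
        driverRegistry["kubevirt.io.hostpath-provisioner"] = true // plugin registers
        g.tryMount("kubevirt.io.hostpath-provisioner")            // now succeeds silently
    }

Run as-is, it prints the mount failure a few times and then goes quiet once the driver is registered, matching how the retries in this log stop after the hostpath plugin comes up.
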
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.174047 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-4stqf" event={"ID":"ebcee70b-d418-4bad-b7df-0c46f8cd7422","Type":"ContainerStarted","Data":"bdb273213ded4f1772632264b529ae50150c72a9e1c13c896d287acf75646817"} Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.174202 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.189643 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc" event={"ID":"e2342447-c2fa-4e43-8a78-acde9e70782a","Type":"ContainerStarted","Data":"9598e2392472050abe9a0da7a79b2b96660073d59d86283c7eea1da3c7515df0"} Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.221701 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-wz6zl" podStartSLOduration=81.221682728 podStartE2EDuration="1m21.221682728s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:03.182170898 +0000 UTC m=+108.766493228" watchObservedRunningTime="2025-12-05 12:11:03.221682728 +0000 UTC m=+108.806005058" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.221935 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" event={"ID":"4dc833de-df68-4794-9093-5698c85833cc","Type":"ContainerStarted","Data":"f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959"} Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.221979 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.221982 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" podStartSLOduration=81.221977145 podStartE2EDuration="1m21.221977145s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:03.082817486 +0000 UTC m=+108.667139816" watchObservedRunningTime="2025-12-05 12:11:03.221977145 +0000 UTC m=+108.806299475" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.223616 4711 patch_prober.go:28] interesting pod/console-operator-58897d9998-4stqf container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.223676 4711 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console-operator/console-operator-58897d9998-4stqf" podUID="ebcee70b-d418-4bad-b7df-0c46f8cd7422" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.239126 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sr8f2" event={"ID":"7f652b9a-5eb6-4066-84f7-dc1a7e09f038","Type":"ContainerStarted","Data":"c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe"} Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.239501 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.267805 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" event={"ID":"3566b1aa-5f58-4bc9-a6b1-446b91bb8fe1","Type":"ContainerStarted","Data":"c66ff1873f2df60f6d5111d9cf1d3985a4ef407744781212fe0b80985a15f52b"} Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.273954 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.275214 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.775179083 +0000 UTC m=+109.359501413 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.295305 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-4stqf" podStartSLOduration=81.295285887 podStartE2EDuration="1m21.295285887s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:03.2888991 +0000 UTC m=+108.873221430" watchObservedRunningTime="2025-12-05 12:11:03.295285887 +0000 UTC m=+108.879608217"
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.350944 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" event={"ID":"450b40c8-78a2-4ac7-97ba-56b2b165c0eb","Type":"ContainerStarted","Data":"ae185ea358687f798eb2ae4f53f39e74a1344b3e2fe2999bc87775e4f5159804"}
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.365423 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" event={"ID":"fa5afbc1-d1a4-40c8-990d-72a8169d5072","Type":"ContainerStarted","Data":"2f718bf7b95f452ffc51c1bd80e93289abeeded985b36701d83213621a04ac17"}
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.368501 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" event={"ID":"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4","Type":"ContainerStarted","Data":"b9396ece302185f63839d68397e71256c95ed06d462f325aef3a63beac8a90bd"}
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.368527 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" event={"ID":"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4","Type":"ContainerStarted","Data":"9c8e3abe974fd8f06330425a913c7d5987b68cc33a4632265e89caef34e6eee3"}
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.371861 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-lrpw8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body=
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.372065 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lrpw8" podUID="d454c199-9738-4862-a556-094087bde5b1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused"
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.376044 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.377863 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.877850237 +0000 UTC m=+109.462172567 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.388970 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.404526 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p"
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.450684 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq"
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.478057 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.481797 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:03.981741679 +0000 UTC m=+109.566064019 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.580700 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2"
Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.581513 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.08149871 +0000 UTC m=+109.665821040 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.629146 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rfdvq" podStartSLOduration=81.62912168 podStartE2EDuration="1m21.62912168s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:03.442583077 +0000 UTC m=+109.026905407" watchObservedRunningTime="2025-12-05 12:11:03.62912168 +0000 UTC m=+109.213444010" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.685330 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.685798 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.185778183 +0000 UTC m=+109.770100513 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.786678 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.787129 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.287113153 +0000 UTC m=+109.871435483 (durationBeforeRetry 500ms). 
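
Each "Observed pod startup duration" entry above is straight timestamp arithmetic: with both pull timestamps left at the zero time (no image pull was observed), podStartSLOduration and podStartE2EDuration both reduce to watchObservedRunningTime minus podCreationTimestamp. A quick Go check against the packageserver entry's values (timestamps copied from the log, converted to RFC 3339; parse errors ignored for brevity):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Values from the packageserver-d55dfcdfc-rfdvq entry above.
        created, _ := time.Parse(time.RFC3339, "2025-12-05T12:09:42Z")
        observed, _ := time.Parse(time.RFC3339Nano, "2025-12-05T12:11:03.62912168Z")

        // With no pulling observed, SLO duration and E2E duration coincide.
        slo := observed.Sub(created)
        fmt.Println(slo.Seconds()) // 81.62912168, matching podStartSLOduration
        fmt.Println(slo)           // 1m21.62912168s, matching podStartE2EDuration
    }

It prints 81.62912168 and 1m21.62912168s, the same podStartSLOduration and podStartE2EDuration logged for packageserver-d55dfcdfc-rfdvq.
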
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.870489 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:03 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:03 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:03 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.870555 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.889076 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.889615 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.389598921 +0000 UTC m=+109.973921251 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.915609 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hcl2z" podStartSLOduration=81.91558837 podStartE2EDuration="1m21.91558837s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:03.815633093 +0000 UTC m=+109.399955443" watchObservedRunningTime="2025-12-05 12:11:03.91558837 +0000 UTC m=+109.499910700" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.916665 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" podStartSLOduration=81.916661307 podStartE2EDuration="1m21.916661307s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:03.905319377 +0000 UTC m=+109.489641717" watchObservedRunningTime="2025-12-05 12:11:03.916661307 +0000 UTC m=+109.500983637" Dec 05 12:11:03 crc kubenswrapper[4711]: I1205 12:11:03.995316 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:03 crc kubenswrapper[4711]: E1205 12:11:03.996117 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.496104658 +0000 UTC m=+110.080426988 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.097620 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.098080 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.598059254 +0000 UTC m=+110.182381584 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.105235 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" podStartSLOduration=83.105205699 podStartE2EDuration="1m23.105205699s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:03.983910649 +0000 UTC m=+109.568232989" watchObservedRunningTime="2025-12-05 12:11:04.105205699 +0000 UTC m=+109.689528029" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.169759 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" podStartSLOduration=83.169733465 podStartE2EDuration="1m23.169733465s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:04.168489084 +0000 UTC m=+109.752811414" watchObservedRunningTime="2025-12-05 12:11:04.169733465 +0000 UTC m=+109.754055795" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.171678 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-sr8f2" podStartSLOduration=82.171669423 podStartE2EDuration="1m22.171669423s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:04.128662466 +0000 UTC m=+109.712984816" watchObservedRunningTime="2025-12-05 12:11:04.171669423 +0000 UTC m=+109.755991753" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.190923 4711 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.199509 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.199902 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.699889296 +0000 UTC m=+110.284211626 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.300117 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/564b439c-0ac6-43d5-afa8-2379ea73a71e-config-volume\") pod \"564b439c-0ac6-43d5-afa8-2379ea73a71e\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.300186 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2r95d\" (UniqueName: \"kubernetes.io/projected/564b439c-0ac6-43d5-afa8-2379ea73a71e-kube-api-access-2r95d\") pod \"564b439c-0ac6-43d5-afa8-2379ea73a71e\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.300562 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.300668 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/564b439c-0ac6-43d5-afa8-2379ea73a71e-secret-volume\") pod \"564b439c-0ac6-43d5-afa8-2379ea73a71e\" (UID: \"564b439c-0ac6-43d5-afa8-2379ea73a71e\") " Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.301438 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/564b439c-0ac6-43d5-afa8-2379ea73a71e-config-volume" (OuterVolumeSpecName: "config-volume") pod "564b439c-0ac6-43d5-afa8-2379ea73a71e" (UID: "564b439c-0ac6-43d5-afa8-2379ea73a71e"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.301579 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.801555154 +0000 UTC m=+110.385877484 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.333018 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/564b439c-0ac6-43d5-afa8-2379ea73a71e-kube-api-access-2r95d" (OuterVolumeSpecName: "kube-api-access-2r95d") pod "564b439c-0ac6-43d5-afa8-2379ea73a71e" (UID: "564b439c-0ac6-43d5-afa8-2379ea73a71e"). InnerVolumeSpecName "kube-api-access-2r95d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.338560 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/564b439c-0ac6-43d5-afa8-2379ea73a71e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "564b439c-0ac6-43d5-afa8-2379ea73a71e" (UID: "564b439c-0ac6-43d5-afa8-2379ea73a71e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.406430 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.406593 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/564b439c-0ac6-43d5-afa8-2379ea73a71e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.406610 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r95d\" (UniqueName: \"kubernetes.io/projected/564b439c-0ac6-43d5-afa8-2379ea73a71e-kube-api-access-2r95d\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.406622 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/564b439c-0ac6-43d5-afa8-2379ea73a71e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.406922 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:04.906906064 +0000 UTC m=+110.491228394 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.432621 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" event={"ID":"450b40c8-78a2-4ac7-97ba-56b2b165c0eb","Type":"ContainerStarted","Data":"f55cbb3da615fc3bca49241bed57201d8fd21f6b7e05615ac92c358867375a52"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.432676 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55845" event={"ID":"450b40c8-78a2-4ac7-97ba-56b2b165c0eb","Type":"ContainerStarted","Data":"edf7fa156de11b9383895494082efba5d2ded62ffa546049d07a589e07024479"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.473123 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rn6fh" event={"ID":"3845ed2f-d607-4ba6-9d74-5293cfa722fd","Type":"ContainerStarted","Data":"2f6070338caba849aef6c146de8b3f04c3c7407a2a9333a8b86ef7600b766899"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.505723 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-4stqf" event={"ID":"ebcee70b-d418-4bad-b7df-0c46f8cd7422","Type":"ContainerStarted","Data":"61a63ab6b69c98218cfd9992e031d9fe5b7608ddf2e1a7ee273d9f218c10f59c"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.506985 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.508800 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.008767116 +0000 UTC m=+110.593089446 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.534933 4711 generic.go:334] "Generic (PLEG): container finished" podID="56f640a1-033c-4c5f-b8f7-70bad15eaed3" containerID="7d8d3557f909ebec014ade0a5ede8f607f5277fbbe5b23dd81cd5ef110086792" exitCode=0 Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.535042 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" event={"ID":"56f640a1-033c-4c5f-b8f7-70bad15eaed3","Type":"ContainerDied","Data":"7d8d3557f909ebec014ade0a5ede8f607f5277fbbe5b23dd81cd5ef110086792"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.562711 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" event={"ID":"fa5afbc1-d1a4-40c8-990d-72a8169d5072","Type":"ContainerStarted","Data":"40cefcc46545061240540c7238e2b5699617431fa840f1201a5bf0359b833e2d"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.562767 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hv9gb" event={"ID":"fa5afbc1-d1a4-40c8-990d-72a8169d5072","Type":"ContainerStarted","Data":"2e6b16fafa203bd65454d0bd86f837ff47f48049544a4a009980690318893129"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.594124 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zxjsn" event={"ID":"c8ec9977-60ff-48aa-a0d4-371b5ffbb2a4","Type":"ContainerStarted","Data":"8e16373f770d0a7e83529cbabe972121430fc055642ffea9e37fb70458270370"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.610172 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-hv9gb" podStartSLOduration=83.610149078 podStartE2EDuration="1m23.610149078s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:04.60697335 +0000 UTC m=+110.191295680" watchObservedRunningTime="2025-12-05 12:11:04.610149078 +0000 UTC m=+110.194471408" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.611732 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.612156 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.112139126 +0000 UTC m=+110.696461456 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.625974 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cxbnt" event={"ID":"540b9374-4012-4532-9d68-ef686b30ba78","Type":"ContainerStarted","Data":"e35ab7c8fb64355647fa686326406022bababff383a112e8c64ff055a08e710f"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.627585 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-cxbnt" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.678790 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-cxbnt" podStartSLOduration=9.678764534 podStartE2EDuration="9.678764534s" podCreationTimestamp="2025-12-05 12:10:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:04.677912733 +0000 UTC m=+110.262235083" watchObservedRunningTime="2025-12-05 12:11:04.678764534 +0000 UTC m=+110.263086864" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.704158 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" event={"ID":"b3dbd950-893e-4a2f-866f-8c538e7371e0","Type":"ContainerStarted","Data":"904e509cf6ae8e6a0b9427c9ae32b92be173200f154736e52f1c0b2c8d847998"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.713777 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.715106 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.215088137 +0000 UTC m=+110.799410467 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.717283 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb" event={"ID":"0206d524-6952-4adb-a5a6-2dc7fc8a60a5","Type":"ContainerStarted","Data":"471388b1ff63c50ca1b7088d7b809dc3e9da48233126ae550f31435736dd2cc6"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.738649 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" event={"ID":"e5514b5e-1f9a-42b8-8db2-56b5cb965f97","Type":"ContainerStarted","Data":"ac538e6226bc19de1d90bcced75316648ee11fb88b02facd85f263b1f2fffbf7"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.750173 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" event={"ID":"2f11fc52-29a6-4b09-8768-2e9b1d9a4a09","Type":"ContainerStarted","Data":"83b0d5d759583e3722c4cf7e0fcc5b9c7cadee745312180e718405377976c723"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.762794 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" event={"ID":"08003b99-9e38-4d7d-a60f-2397dda4f9a6","Type":"ContainerStarted","Data":"247b0db04428f29ce4555492305721e432675455d9fadea8d765caa00415d01c"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.764089 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.774287 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.780609 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" event={"ID":"df146046-34ea-410b-a342-83bc374306d1","Type":"ContainerStarted","Data":"7c3fb3b8d381343067fbf66fde0a5024e48b38ed9f440b848845a634049a6261"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.797189 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" event={"ID":"39bf8b25-13ec-4cf0-ba60-6159836e8f09","Type":"ContainerStarted","Data":"61bbdf7752b7dcc08e1c8689a3588438ae39364b9bf05b172419f3a890dc13ee"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.798165 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.814473 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" event={"ID":"564b439c-0ac6-43d5-afa8-2379ea73a71e","Type":"ContainerDied","Data":"82feff36be0a451bc9c5811206eda114f8934bf951cce247c67f2c495a88cb50"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.814524 4711 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82feff36be0a451bc9c5811206eda114f8934bf951cce247c67f2c495a88cb50" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.814638 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.819131 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.819551 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.319536433 +0000 UTC m=+110.903858753 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.829771 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc" event={"ID":"e2342447-c2fa-4e43-8a78-acde9e70782a","Type":"ContainerStarted","Data":"5ba8ee503e2e6ef7071c90aa37cd66e53b499758d5584caa42620b2dfbb23609"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.830202 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc" event={"ID":"e2342447-c2fa-4e43-8a78-acde9e70782a","Type":"ContainerStarted","Data":"e8995fdbee173d5dcc0381795c86cb7a79ac0b3353dc26c70942a48f3207ea5e"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.836933 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" event={"ID":"a4b80150-d70c-4bea-a687-46b452b82f6d","Type":"ContainerStarted","Data":"03720c91387bc855ad3c9dfaa604642aab862976a871ac4d38cb722bbc04ff5b"} Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.839753 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-lrpw8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.839825 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lrpw8" podUID="d454c199-9738-4862-a556-094087bde5b1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.857742 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wkn79" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.869719 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:04 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:04 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:04 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.869793 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.919998 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:04 crc kubenswrapper[4711]: E1205 12:11:04.922263 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.422232377 +0000 UTC m=+111.006554707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:04 crc kubenswrapper[4711]: I1205 12:11:04.983945 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" podStartSLOduration=83.983913543 podStartE2EDuration="1m23.983913543s" podCreationTimestamp="2025-12-05 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:04.96955006 +0000 UTC m=+110.553872390" watchObservedRunningTime="2025-12-05 12:11:04.983913543 +0000 UTC m=+110.568235873" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.022319 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.022777 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.522752997 +0000 UTC m=+111.107075327 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.073474 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-sszzb" podStartSLOduration=83.073452893 podStartE2EDuration="1m23.073452893s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:05.072641963 +0000 UTC m=+110.656964293" watchObservedRunningTime="2025-12-05 12:11:05.073452893 +0000 UTC m=+110.657775223" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.125186 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.125683 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.625662856 +0000 UTC m=+111.209985186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.232516 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.233068 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.733051275 +0000 UTC m=+111.317373615 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.257600 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-4stqf" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.334079 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.334320 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.834296912 +0000 UTC m=+111.418619242 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.348310 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z4kxc" podStartSLOduration=83.348285276 podStartE2EDuration="1m23.348285276s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:05.303399083 +0000 UTC m=+110.887721413" watchObservedRunningTime="2025-12-05 12:11:05.348285276 +0000 UTC m=+110.932607606" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.408037 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dhtx2" podStartSLOduration=83.408013394 podStartE2EDuration="1m23.408013394s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:05.357181594 +0000 UTC m=+110.941503924" watchObservedRunningTime="2025-12-05 12:11:05.408013394 +0000 UTC m=+110.992335714" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.410760 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-5gn9w" podStartSLOduration=83.410751071 podStartE2EDuration="1m23.410751071s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 12:11:05.406455936 +0000 UTC m=+110.990778286" watchObservedRunningTime="2025-12-05 12:11:05.410751071 +0000 UTC m=+110.995073401" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.436036 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.436685 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:05.936649157 +0000 UTC m=+111.520971487 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.441004 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" podStartSLOduration=83.440976714 podStartE2EDuration="1m23.440976714s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:05.440194755 +0000 UTC m=+111.024517085" watchObservedRunningTime="2025-12-05 12:11:05.440976714 +0000 UTC m=+111.025299044" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.464832 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-rkds8" podStartSLOduration=83.464809389 podStartE2EDuration="1m23.464809389s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:05.461641882 +0000 UTC m=+111.045964212" watchObservedRunningTime="2025-12-05 12:11:05.464809389 +0000 UTC m=+111.049131719" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.535964 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cxjlv" podStartSLOduration=83.535941177 podStartE2EDuration="1m23.535941177s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:05.534691007 +0000 UTC m=+111.119013327" watchObservedRunningTime="2025-12-05 12:11:05.535941177 +0000 UTC m=+111.120263507" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.537665 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.537820 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.037798053 +0000 UTC m=+111.622120383 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.537988 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.538327 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.038304426 +0000 UTC m=+111.622626756 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.551792 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tl7dg"] Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.552033 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564b439c-0ac6-43d5-afa8-2379ea73a71e" containerName="collect-profiles" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.552049 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="564b439c-0ac6-43d5-afa8-2379ea73a71e" containerName="collect-profiles" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.552147 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="564b439c-0ac6-43d5-afa8-2379ea73a71e" containerName="collect-profiles" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.552871 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.557553 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.572329 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tl7dg"] Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.641774 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.642092 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-catalog-content\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.642171 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt6dw\" (UniqueName: \"kubernetes.io/projected/94731d22-9a75-438c-88c1-3cc84818e7e3-kube-api-access-tt6dw\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.642231 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-utilities\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.642457 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.142423234 +0000 UTC m=+111.726745564 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.742394 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6wmqx"] Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.743277 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt6dw\" (UniqueName: \"kubernetes.io/projected/94731d22-9a75-438c-88c1-3cc84818e7e3-kube-api-access-tt6dw\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.743351 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-utilities\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.743450 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.743517 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.743544 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-catalog-content\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.743973 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.243952639 +0000 UTC m=+111.828274969 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.744428 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-catalog-content\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.744621 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-utilities\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: W1205 12:11:05.750752 4711 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.750845 4711 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.829796 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt6dw\" (UniqueName: \"kubernetes.io/projected/94731d22-9a75-438c-88c1-3cc84818e7e3-kube-api-access-tt6dw\") pod \"community-operators-tl7dg\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.840419 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6wmqx"] Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.845532 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.845736 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8w2t\" (UniqueName: \"kubernetes.io/projected/24a27655-eff1-4912-9f6c-42f7d9e68ee3-kube-api-access-m8w2t\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc 
kubenswrapper[4711]: I1205 12:11:05.845788 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-catalog-content\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.845809 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-utilities\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.845947 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.345929534 +0000 UTC m=+111.930251864 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.866120 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.868897 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:05 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:05 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:05 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.868982 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.898568 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" event={"ID":"56f640a1-033c-4c5f-b8f7-70bad15eaed3","Type":"ContainerStarted","Data":"09ad1e9875e54c06b8fcb54933aa974a2a4157a8c6290f2b1f936425ea41ef4a"} Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.899476 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.931303 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" event={"ID":"df146046-34ea-410b-a342-83bc374306d1","Type":"ContainerStarted","Data":"3cace7300d5e897186d1b7c4236df9ac7a398628280d5a4ab916a80a1d5a6c97"} Dec 05 
12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.931354 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" event={"ID":"df146046-34ea-410b-a342-83bc374306d1","Type":"ContainerStarted","Data":"df53c8338538ba2c49b5cd783b73b66edcea5fe1b53fc79ed2d14a50b6ade7ea"} Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.953399 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.953784 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8w2t\" (UniqueName: \"kubernetes.io/projected/24a27655-eff1-4912-9f6c-42f7d9e68ee3-kube-api-access-m8w2t\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.953924 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-catalog-content\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.954058 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-utilities\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.954659 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-utilities\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc kubenswrapper[4711]: E1205 12:11:05.955007 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.454994594 +0000 UTC m=+112.039316924 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.955670 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-catalog-content\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.956239 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" podStartSLOduration=83.956223165 podStartE2EDuration="1m23.956223165s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:05.954210846 +0000 UTC m=+111.538533176" watchObservedRunningTime="2025-12-05 12:11:05.956223165 +0000 UTC m=+111.540545495" Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.978220 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fct8z"] Dec 05 12:11:05 crc kubenswrapper[4711]: I1205 12:11:05.980038 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.004612 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8w2t\" (UniqueName: \"kubernetes.io/projected/24a27655-eff1-4912-9f6c-42f7d9e68ee3-kube-api-access-m8w2t\") pod \"certified-operators-6wmqx\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.013005 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fct8z"] Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.055284 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.076056 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-944r8\" (UniqueName: \"kubernetes.io/projected/d9a036a6-710b-4d06-bdd1-05c493228cf0-kube-api-access-944r8\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: E1205 12:11:06.076484 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 12:11:06.576457519 +0000 UTC m=+112.160779849 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.076618 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.076868 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-utilities\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.076897 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-catalog-content\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: E1205 12:11:06.079126 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.579114245 +0000 UTC m=+112.163436575 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.108526 4711 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.136198 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t6fkc"] Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.137328 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.166106 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t6fkc"] Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.180082 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:06 crc kubenswrapper[4711]: E1205 12:11:06.183794 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.683753577 +0000 UTC m=+112.268075907 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.185644 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-944r8\" (UniqueName: \"kubernetes.io/projected/d9a036a6-710b-4d06-bdd1-05c493228cf0-kube-api-access-944r8\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.185804 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.185887 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-utilities\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.185902 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-catalog-content\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.186545 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-catalog-content\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc 
kubenswrapper[4711]: E1205 12:11:06.188435 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.68839193 +0000 UTC m=+112.272714430 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m7fx2" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.189051 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-utilities\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.231893 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-944r8\" (UniqueName: \"kubernetes.io/projected/d9a036a6-710b-4d06-bdd1-05c493228cf0-kube-api-access-944r8\") pod \"community-operators-fct8z\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.272239 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.273143 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.282613 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.282816 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.296755 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.298085 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.298329 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpn7t\" (UniqueName: \"kubernetes.io/projected/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-kube-api-access-dpn7t\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.298398 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-catalog-content\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.298477 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-utilities\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: E1205 12:11:06.298616 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:11:06.798596818 +0000 UTC m=+112.382919148 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.352437 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.356342 4711 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T12:11:06.108833395Z","Handler":null,"Name":""} Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.387189 4711 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.387247 4711 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.399676 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-utilities\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.399732 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6f7ac09-af6e-42c6-a723-2465fabb7465-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b6f7ac09-af6e-42c6-a723-2465fabb7465\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.399780 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.399827 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpn7t\" (UniqueName: \"kubernetes.io/projected/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-kube-api-access-dpn7t\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.399881 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-catalog-content\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.399921 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6f7ac09-af6e-42c6-a723-2465fabb7465-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b6f7ac09-af6e-42c6-a723-2465fabb7465\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.400464 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-utilities\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.401171 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-catalog-content\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.411533 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.411619 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.432003 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpn7t\" (UniqueName: \"kubernetes.io/projected/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-kube-api-access-dpn7t\") pod \"certified-operators-t6fkc\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.447452 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m7fx2\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.500906 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.501254 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6f7ac09-af6e-42c6-a723-2465fabb7465-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b6f7ac09-af6e-42c6-a723-2465fabb7465\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.501321 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6f7ac09-af6e-42c6-a723-2465fabb7465-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b6f7ac09-af6e-42c6-a723-2465fabb7465\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.501393 4711 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6f7ac09-af6e-42c6-a723-2465fabb7465-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b6f7ac09-af6e-42c6-a723-2465fabb7465\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.521511 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.524940 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.537903 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6f7ac09-af6e-42c6-a723-2465fabb7465-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b6f7ac09-af6e-42c6-a723-2465fabb7465\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.606518 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tl7dg"] Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.620091 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.623113 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.628567 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.629970 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.719645 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.798266 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fct8z"] Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.862665 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:06 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:06 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:06 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:06 crc kubenswrapper[4711]: I1205 12:11:06.862775 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:06 crc kubenswrapper[4711]: W1205 12:11:06.873606 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9a036a6_710b_4d06_bdd1_05c493228cf0.slice/crio-4ee8185357af8c761c5da4f35c54e6750803bd8fefbcf19757670c6a1d19829b WatchSource:0}: Error finding container 4ee8185357af8c761c5da4f35c54e6750803bd8fefbcf19757670c6a1d19829b: Status 404 returned error can't find the container with id 4ee8185357af8c761c5da4f35c54e6750803bd8fefbcf19757670c6a1d19829b Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.005858 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fct8z" event={"ID":"d9a036a6-710b-4d06-bdd1-05c493228cf0","Type":"ContainerStarted","Data":"4ee8185357af8c761c5da4f35c54e6750803bd8fefbcf19757670c6a1d19829b"} Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.031272 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" event={"ID":"df146046-34ea-410b-a342-83bc374306d1","Type":"ContainerStarted","Data":"ca76060ca04f305691ec94b870322b558ca31ee69bab78ca67f58587d770b038"} Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.056783 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl7dg" event={"ID":"94731d22-9a75-438c-88c1-3cc84818e7e3","Type":"ContainerStarted","Data":"6855f94635ae61214b390b03d74fa09728e1ffc284868239dfdaf988dca96ed7"} Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.075178 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" podStartSLOduration=12.07515707 podStartE2EDuration="12.07515707s" podCreationTimestamp="2025-12-05 12:10:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:07.073526451 +0000 UTC m=+112.657848791" watchObservedRunningTime="2025-12-05 12:11:07.07515707 +0000 UTC m=+112.659479400" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.132445 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/image-registry-697d97f7c8-m7fx2"] Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.245895 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p4qj4" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.346563 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.614846 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6wmqx"] Dec 05 12:11:07 crc kubenswrapper[4711]: W1205 12:11:07.623130 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24a27655_eff1_4912_9f6c_42f7d9e68ee3.slice/crio-b5b21a6bb13fac7abb21065adde39b67f9c88c21a9de57fe84f7c523c8ece1f2 WatchSource:0}: Error finding container b5b21a6bb13fac7abb21065adde39b67f9c88c21a9de57fe84f7c523c8ece1f2: Status 404 returned error can't find the container with id b5b21a6bb13fac7abb21065adde39b67f9c88c21a9de57fe84f7c523c8ece1f2 Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.639778 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t6fkc"] Dec 05 12:11:07 crc kubenswrapper[4711]: W1205 12:11:07.647935 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8d3db38_6cb9_472a_8c5e_94eaf39e4703.slice/crio-786eab04d7634a4ec6f0d13b1446ede7d835436d1d5fd57b8b097f4365393136 WatchSource:0}: Error finding container 786eab04d7634a4ec6f0d13b1446ede7d835436d1d5fd57b8b097f4365393136: Status 404 returned error can't find the container with id 786eab04d7634a4ec6f0d13b1446ede7d835436d1d5fd57b8b097f4365393136 Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.771455 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.773011 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.776051 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.776297 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.778066 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.778966 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.782994 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.787019 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.834402 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-lrpw8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.834480 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lrpw8" podUID="d454c199-9738-4862-a556-094087bde5b1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.834519 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-lrpw8 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.834576 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-lrpw8" podUID="d454c199-9738-4862-a556-094087bde5b1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.839237 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.839301 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.854535 4711 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.859458 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:07 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:07 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:07 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.859514 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.928682 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5n4h8"] Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.929854 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.933260 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.939747 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5n4h8"] Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.943080 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.943166 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:07 crc kubenswrapper[4711]: I1205 12:11:07.944274 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.012062 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.044558 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-utilities\") pod \"redhat-marketplace-5n4h8\" (UID: 
\"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.045087 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nftp\" (UniqueName: \"kubernetes.io/projected/a7f95e23-8ec1-48cb-b206-123e5426d705-kube-api-access-6nftp\") pod \"redhat-marketplace-5n4h8\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.045133 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-catalog-content\") pod \"redhat-marketplace-5n4h8\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.062796 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" event={"ID":"259a4e64-67b8-417e-8948-4cc028bb728d","Type":"ContainerStarted","Data":"0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.062876 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" event={"ID":"259a4e64-67b8-417e-8948-4cc028bb728d","Type":"ContainerStarted","Data":"a116c2f9df17bd018bf211a6d1e28ec1cc228b4a14dd7be37e9be97e204565f1"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.062945 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.064741 4711 generic.go:334] "Generic (PLEG): container finished" podID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerID="fab15334d942c5c8e154f3d25329f8e3fc4d3bc88489d3c262592b95e0745203" exitCode=0 Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.064846 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl7dg" event={"ID":"94731d22-9a75-438c-88c1-3cc84818e7e3","Type":"ContainerDied","Data":"fab15334d942c5c8e154f3d25329f8e3fc4d3bc88489d3c262592b95e0745203"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.067490 4711 generic.go:334] "Generic (PLEG): container finished" podID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerID="9a7c14240545ae070b37df7ca60883806b13479ef650aa39abe07e895d2a8fc6" exitCode=0 Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.067539 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fct8z" event={"ID":"d9a036a6-710b-4d06-bdd1-05c493228cf0","Type":"ContainerDied","Data":"9a7c14240545ae070b37df7ca60883806b13479ef650aa39abe07e895d2a8fc6"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.068486 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.071824 4711 generic.go:334] "Generic (PLEG): container finished" podID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerID="100dfa4c05ee382ce23fa274fcbbe3e27e18cfcbbc2015f79d257f204d2af23d" exitCode=0 Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.071879 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6wmqx" 
event={"ID":"24a27655-eff1-4912-9f6c-42f7d9e68ee3","Type":"ContainerDied","Data":"100dfa4c05ee382ce23fa274fcbbe3e27e18cfcbbc2015f79d257f204d2af23d"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.071907 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6wmqx" event={"ID":"24a27655-eff1-4912-9f6c-42f7d9e68ee3","Type":"ContainerStarted","Data":"b5b21a6bb13fac7abb21065adde39b67f9c88c21a9de57fe84f7c523c8ece1f2"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.076317 4711 generic.go:334] "Generic (PLEG): container finished" podID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerID="2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4" exitCode=0 Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.076461 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6fkc" event={"ID":"c8d3db38-6cb9-472a-8c5e-94eaf39e4703","Type":"ContainerDied","Data":"2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.076556 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6fkc" event={"ID":"c8d3db38-6cb9-472a-8c5e-94eaf39e4703","Type":"ContainerStarted","Data":"786eab04d7634a4ec6f0d13b1446ede7d835436d1d5fd57b8b097f4365393136"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.082675 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b6f7ac09-af6e-42c6-a723-2465fabb7465","Type":"ContainerStarted","Data":"82422cc4955a64f66b04668ced29a4e9a09245e5d31dabc7bdbf56790165b8aa"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.082732 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b6f7ac09-af6e-42c6-a723-2465fabb7465","Type":"ContainerStarted","Data":"06894a7d1e2532f63f6c80b0100ba063f9ed71eaeb5c1ed7839cdbd0699ff4f0"} Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.087179 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" podStartSLOduration=86.087157839 podStartE2EDuration="1m26.087157839s" podCreationTimestamp="2025-12-05 12:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:08.085031467 +0000 UTC m=+113.669353787" watchObservedRunningTime="2025-12-05 12:11:08.087157839 +0000 UTC m=+113.671480169" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.090693 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.094992 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-t5m9v" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.142445 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.142419647 podStartE2EDuration="2.142419647s" podCreationTimestamp="2025-12-05 12:11:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:08.135104127 +0000 UTC m=+113.719426457" watchObservedRunningTime="2025-12-05 12:11:08.142419647 +0000 UTC m=+113.726741977" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.148588 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nftp\" (UniqueName: \"kubernetes.io/projected/a7f95e23-8ec1-48cb-b206-123e5426d705-kube-api-access-6nftp\") pod \"redhat-marketplace-5n4h8\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.148760 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-catalog-content\") pod \"redhat-marketplace-5n4h8\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.149793 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-utilities\") pod \"redhat-marketplace-5n4h8\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.150751 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-catalog-content\") pod \"redhat-marketplace-5n4h8\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.151745 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-utilities\") pod \"redhat-marketplace-5n4h8\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.179867 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nftp\" (UniqueName: \"kubernetes.io/projected/a7f95e23-8ec1-48cb-b206-123e5426d705-kube-api-access-6nftp\") pod \"redhat-marketplace-5n4h8\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.250803 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.346037 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-75gn6"] Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.358900 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.362839 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-75gn6"] Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.453713 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.454376 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blltx\" (UniqueName: \"kubernetes.io/projected/d4613689-1783-4370-9d69-a945cde59468-kube-api-access-blltx\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.454641 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-catalog-content\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.454663 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-utilities\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.556527 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blltx\" (UniqueName: \"kubernetes.io/projected/d4613689-1783-4370-9d69-a945cde59468-kube-api-access-blltx\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.557036 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-catalog-content\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.557073 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-utilities\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.558904 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-utilities\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " 
pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.558951 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-catalog-content\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.586711 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blltx\" (UniqueName: \"kubernetes.io/projected/d4613689-1783-4370-9d69-a945cde59468-kube-api-access-blltx\") pod \"redhat-marketplace-75gn6\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.681858 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5n4h8"] Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.689232 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.697988 4711 patch_prober.go:28] interesting pod/console-f9d7485db-sr8f2 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.698049 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-sr8f2" podUID="7f652b9a-5eb6-4066-84f7-dc1a7e09f038" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.756295 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.756352 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.767104 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qrks9"] Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.769738 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.772208 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.799768 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qrks9"] Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.858537 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:08 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:08 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:08 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.859139 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.865646 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-utilities\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.865746 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9qpn\" (UniqueName: \"kubernetes.io/projected/be44c776-1254-4da5-8bb4-e5b4c552a26f-kube-api-access-v9qpn\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.865830 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-catalog-content\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.944195 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lfx27"] Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.945328 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.961782 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lfx27"] Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.970327 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9ztx\" (UniqueName: \"kubernetes.io/projected/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-kube-api-access-w9ztx\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.970392 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-utilities\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.970498 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9qpn\" (UniqueName: \"kubernetes.io/projected/be44c776-1254-4da5-8bb4-e5b4c552a26f-kube-api-access-v9qpn\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.970532 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-catalog-content\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.970574 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-catalog-content\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.970736 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-utilities\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.971095 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-catalog-content\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.971161 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-utilities\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:08 crc kubenswrapper[4711]: I1205 12:11:08.991738 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-v9qpn\" (UniqueName: \"kubernetes.io/projected/be44c776-1254-4da5-8bb4-e5b4c552a26f-kube-api-access-v9qpn\") pod \"redhat-operators-qrks9\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.076109 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-catalog-content\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.076251 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9ztx\" (UniqueName: \"kubernetes.io/projected/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-kube-api-access-w9ztx\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.076280 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-utilities\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.077255 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-utilities\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.077306 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-catalog-content\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.100794 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9ztx\" (UniqueName: \"kubernetes.io/projected/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-kube-api-access-w9ztx\") pod \"redhat-operators-lfx27\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.103989 4711 generic.go:334] "Generic (PLEG): container finished" podID="b6f7ac09-af6e-42c6-a723-2465fabb7465" containerID="82422cc4955a64f66b04668ced29a4e9a09245e5d31dabc7bdbf56790165b8aa" exitCode=0 Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.104098 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b6f7ac09-af6e-42c6-a723-2465fabb7465","Type":"ContainerDied","Data":"82422cc4955a64f66b04668ced29a4e9a09245e5d31dabc7bdbf56790165b8aa"} Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.112873 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-75gn6"] Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.114781 4711 generic.go:334] "Generic (PLEG): container finished" podID="a7f95e23-8ec1-48cb-b206-123e5426d705" 
containerID="901e6789a483035b3ccd0ca89e568166c609534add984ccdc9dffd41df4e1019" exitCode=0 Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.114867 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5n4h8" event={"ID":"a7f95e23-8ec1-48cb-b206-123e5426d705","Type":"ContainerDied","Data":"901e6789a483035b3ccd0ca89e568166c609534add984ccdc9dffd41df4e1019"} Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.114904 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5n4h8" event={"ID":"a7f95e23-8ec1-48cb-b206-123e5426d705","Type":"ContainerStarted","Data":"07f95c7ac46d02eb7d4a54fe32f56b7fe9f96abacf312dd98de8aceaa5a0f399"} Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.140759 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ae4f01f2-ed5e-4462-b048-0f380551fdd7","Type":"ContainerStarted","Data":"fe9ddffca07ad2b70f9dbe8dbfbb3d87977aab19c940f526ebd41850addae525"} Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.140817 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ae4f01f2-ed5e-4462-b048-0f380551fdd7","Type":"ContainerStarted","Data":"0812f823988c81b6493ef8122edb686a6de6c05492c776ecd13c5dc403c0dd9e"} Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.174317 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.284263 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.568989 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.56890213 podStartE2EDuration="2.56890213s" podCreationTimestamp="2025-12-05 12:11:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:09.177610345 +0000 UTC m=+114.761932675" watchObservedRunningTime="2025-12-05 12:11:09.56890213 +0000 UTC m=+115.153224470" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.575119 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qrks9"] Dec 05 12:11:09 crc kubenswrapper[4711]: W1205 12:11:09.630533 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe44c776_1254_4da5_8bb4_e5b4c552a26f.slice/crio-4b655ee0b430dad54336b6b0769d2048269e581dc1fa3d35ca1687242fe26b70 WatchSource:0}: Error finding container 4b655ee0b430dad54336b6b0769d2048269e581dc1fa3d35ca1687242fe26b70: Status 404 returned error can't find the container with id 4b655ee0b430dad54336b6b0769d2048269e581dc1fa3d35ca1687242fe26b70 Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.861414 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:09 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:09 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:09 crc 
kubenswrapper[4711]: healthz check failed Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.861471 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:09 crc kubenswrapper[4711]: I1205 12:11:09.956295 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lfx27"] Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.237231 4711 generic.go:334] "Generic (PLEG): container finished" podID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerID="49d69e782990b16ccd412814cf92092078ef32872d26b00a13a8430965bc9082" exitCode=0 Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.237875 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrks9" event={"ID":"be44c776-1254-4da5-8bb4-e5b4c552a26f","Type":"ContainerDied","Data":"49d69e782990b16ccd412814cf92092078ef32872d26b00a13a8430965bc9082"} Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.237921 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrks9" event={"ID":"be44c776-1254-4da5-8bb4-e5b4c552a26f","Type":"ContainerStarted","Data":"4b655ee0b430dad54336b6b0769d2048269e581dc1fa3d35ca1687242fe26b70"} Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.241421 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfx27" event={"ID":"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72","Type":"ContainerStarted","Data":"f3d6414ad00365dd2cf73a112d547dc4546882e05a34009d7ac03d5dc0de0123"} Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.248855 4711 generic.go:334] "Generic (PLEG): container finished" podID="ae4f01f2-ed5e-4462-b048-0f380551fdd7" containerID="fe9ddffca07ad2b70f9dbe8dbfbb3d87977aab19c940f526ebd41850addae525" exitCode=0 Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.248956 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ae4f01f2-ed5e-4462-b048-0f380551fdd7","Type":"ContainerDied","Data":"fe9ddffca07ad2b70f9dbe8dbfbb3d87977aab19c940f526ebd41850addae525"} Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.326151 4711 generic.go:334] "Generic (PLEG): container finished" podID="d4613689-1783-4370-9d69-a945cde59468" containerID="610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e" exitCode=0 Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.327934 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-75gn6" event={"ID":"d4613689-1783-4370-9d69-a945cde59468","Type":"ContainerDied","Data":"610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e"} Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.328002 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-75gn6" event={"ID":"d4613689-1783-4370-9d69-a945cde59468","Type":"ContainerStarted","Data":"19d1c7e1b5c68411e257cc730a40f83146b33e5c61728421a572661444177483"} Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.789749 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.838629 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6f7ac09-af6e-42c6-a723-2465fabb7465-kubelet-dir\") pod \"b6f7ac09-af6e-42c6-a723-2465fabb7465\" (UID: \"b6f7ac09-af6e-42c6-a723-2465fabb7465\") " Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.839286 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6f7ac09-af6e-42c6-a723-2465fabb7465-kube-api-access\") pod \"b6f7ac09-af6e-42c6-a723-2465fabb7465\" (UID: \"b6f7ac09-af6e-42c6-a723-2465fabb7465\") " Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.840128 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6f7ac09-af6e-42c6-a723-2465fabb7465-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b6f7ac09-af6e-42c6-a723-2465fabb7465" (UID: "b6f7ac09-af6e-42c6-a723-2465fabb7465"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.864128 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6f7ac09-af6e-42c6-a723-2465fabb7465-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b6f7ac09-af6e-42c6-a723-2465fabb7465" (UID: "b6f7ac09-af6e-42c6-a723-2465fabb7465"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.881781 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:10 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:10 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:10 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.881885 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.940791 4711 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6f7ac09-af6e-42c6-a723-2465fabb7465-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:10 crc kubenswrapper[4711]: I1205 12:11:10.940825 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6f7ac09-af6e-42c6-a723-2465fabb7465-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.336765 4711 generic.go:334] "Generic (PLEG): container finished" podID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerID="9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f" exitCode=0 Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.336869 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfx27" 
event={"ID":"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72","Type":"ContainerDied","Data":"9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f"} Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.341459 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.342268 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b6f7ac09-af6e-42c6-a723-2465fabb7465","Type":"ContainerDied","Data":"06894a7d1e2532f63f6c80b0100ba063f9ed71eaeb5c1ed7839cdbd0699ff4f0"} Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.342324 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06894a7d1e2532f63f6c80b0100ba063f9ed71eaeb5c1ed7839cdbd0699ff4f0" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.613045 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.655227 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.761463 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kube-api-access\") pod \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\" (UID: \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\") " Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.762574 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kubelet-dir\") pod \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\" (UID: \"ae4f01f2-ed5e-4462-b048-0f380551fdd7\") " Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.762727 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ae4f01f2-ed5e-4462-b048-0f380551fdd7" (UID: "ae4f01f2-ed5e-4462-b048-0f380551fdd7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.764337 4711 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.774617 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ae4f01f2-ed5e-4462-b048-0f380551fdd7" (UID: "ae4f01f2-ed5e-4462-b048-0f380551fdd7"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.858316 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:11 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:11 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:11 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.858405 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:11 crc kubenswrapper[4711]: I1205 12:11:11.865613 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ae4f01f2-ed5e-4462-b048-0f380551fdd7-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:12 crc kubenswrapper[4711]: I1205 12:11:12.351966 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ae4f01f2-ed5e-4462-b048-0f380551fdd7","Type":"ContainerDied","Data":"0812f823988c81b6493ef8122edb686a6de6c05492c776ecd13c5dc403c0dd9e"} Dec 05 12:11:12 crc kubenswrapper[4711]: I1205 12:11:12.352014 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0812f823988c81b6493ef8122edb686a6de6c05492c776ecd13c5dc403c0dd9e" Dec 05 12:11:12 crc kubenswrapper[4711]: I1205 12:11:12.352077 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:11:12 crc kubenswrapper[4711]: I1205 12:11:12.862148 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:12 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:12 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:12 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:12 crc kubenswrapper[4711]: I1205 12:11:12.862298 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:13 crc kubenswrapper[4711]: I1205 12:11:13.138735 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-cxbnt" Dec 05 12:11:13 crc kubenswrapper[4711]: I1205 12:11:13.857769 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:13 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:13 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:13 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:13 crc kubenswrapper[4711]: I1205 12:11:13.857949 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:14 crc kubenswrapper[4711]: I1205 12:11:14.859561 4711 patch_prober.go:28] interesting pod/router-default-5444994796-2f9kw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:11:14 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Dec 05 12:11:14 crc kubenswrapper[4711]: [+]process-running ok Dec 05 12:11:14 crc kubenswrapper[4711]: healthz check failed Dec 05 12:11:14 crc kubenswrapper[4711]: I1205 12:11:14.860683 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2f9kw" podUID="39063f31-28f4-4464-b29e-2589d64907bc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:11:15 crc kubenswrapper[4711]: I1205 12:11:15.858587 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:11:15 crc kubenswrapper[4711]: I1205 12:11:15.863082 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-2f9kw" Dec 05 12:11:17 crc kubenswrapper[4711]: I1205 12:11:17.837038 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-lrpw8" Dec 05 12:11:18 crc kubenswrapper[4711]: I1205 12:11:18.701380 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:11:18 crc kubenswrapper[4711]: 
I1205 12:11:18.715501 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:11:26 crc kubenswrapper[4711]: I1205 12:11:26.527536 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:11:38 crc kubenswrapper[4711]: E1205 12:11:38.143097 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 12:11:38 crc kubenswrapper[4711]: E1205 12:11:38.144148 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dpn7t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-t6fkc_openshift-marketplace(c8d3db38-6cb9-472a-8c5e-94eaf39e4703): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:11:38 crc kubenswrapper[4711]: E1205 12:11:38.145342 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-t6fkc" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" Dec 05 12:11:38 crc kubenswrapper[4711]: I1205 12:11:38.720600 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q4s7j" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.205889 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-t6fkc" 
podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.272745 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.272990 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tt6dw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-tl7dg_openshift-marketplace(94731d22-9a75-438c-88c1-3cc84818e7e3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.274168 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-tl7dg" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.360326 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.360573 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-944r8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-fct8z_openshift-marketplace(d9a036a6-710b-4d06-bdd1-05c493228cf0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.361752 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-fct8z" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.368348 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.368590 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m8w2t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-6wmqx_openshift-marketplace(24a27655-eff1-4912-9f6c-42f7d9e68ee3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:11:41 crc kubenswrapper[4711]: E1205 12:11:41.370536 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-6wmqx" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" Dec 05 12:11:44 crc kubenswrapper[4711]: E1205 12:11:44.953027 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-6wmqx" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" Dec 05 12:11:44 crc kubenswrapper[4711]: E1205 12:11:44.953027 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-tl7dg" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" Dec 05 12:11:44 crc kubenswrapper[4711]: E1205 12:11:44.953118 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-fct8z" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" Dec 05 12:11:46 crc kubenswrapper[4711]: E1205 12:11:46.255496 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 12:11:46 crc kubenswrapper[4711]: E1205 12:11:46.256983 4711 kuberuntime_manager.go:1274] 
"Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6nftp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-5n4h8_openshift-marketplace(a7f95e23-8ec1-48cb-b206-123e5426d705): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:11:46 crc kubenswrapper[4711]: E1205 12:11:46.258746 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-5n4h8" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" Dec 05 12:11:46 crc kubenswrapper[4711]: E1205 12:11:46.283513 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 12:11:46 crc kubenswrapper[4711]: E1205 12:11:46.283747 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-blltx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-75gn6_openshift-marketplace(d4613689-1783-4370-9d69-a945cde59468): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:11:46 crc kubenswrapper[4711]: E1205 12:11:46.285056 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-75gn6" podUID="d4613689-1783-4370-9d69-a945cde59468" Dec 05 12:11:46 crc kubenswrapper[4711]: I1205 12:11:46.642312 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrks9" event={"ID":"be44c776-1254-4da5-8bb4-e5b4c552a26f","Type":"ContainerStarted","Data":"ca712b7e90cabe7ab5760d401e36cfa4a94b01902cbf9b447c751fc4aa314fd2"} Dec 05 12:11:46 crc kubenswrapper[4711]: I1205 12:11:46.645541 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfx27" event={"ID":"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72","Type":"ContainerStarted","Data":"ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204"} Dec 05 12:11:46 crc kubenswrapper[4711]: E1205 12:11:46.647643 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-5n4h8" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" Dec 05 12:11:46 crc kubenswrapper[4711]: E1205 12:11:46.650123 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-75gn6" podUID="d4613689-1783-4370-9d69-a945cde59468" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.478823 4711 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 12:11:47 crc kubenswrapper[4711]: E1205 12:11:47.479653 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f7ac09-af6e-42c6-a723-2465fabb7465" containerName="pruner" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.479676 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f7ac09-af6e-42c6-a723-2465fabb7465" containerName="pruner" Dec 05 12:11:47 crc kubenswrapper[4711]: E1205 12:11:47.479894 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae4f01f2-ed5e-4462-b048-0f380551fdd7" containerName="pruner" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.479905 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae4f01f2-ed5e-4462-b048-0f380551fdd7" containerName="pruner" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.480075 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6f7ac09-af6e-42c6-a723-2465fabb7465" containerName="pruner" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.480100 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae4f01f2-ed5e-4462-b048-0f380551fdd7" containerName="pruner" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.480726 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.483578 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.484369 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.485155 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.654341 4711 generic.go:334] "Generic (PLEG): container finished" podID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerID="ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204" exitCode=0 Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.654471 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfx27" event={"ID":"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72","Type":"ContainerDied","Data":"ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204"} Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.660901 4711 generic.go:334] "Generic (PLEG): container finished" podID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerID="ca712b7e90cabe7ab5760d401e36cfa4a94b01902cbf9b447c751fc4aa314fd2" exitCode=0 Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.660947 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrks9" event={"ID":"be44c776-1254-4da5-8bb4-e5b4c552a26f","Type":"ContainerDied","Data":"ca712b7e90cabe7ab5760d401e36cfa4a94b01902cbf9b447c751fc4aa314fd2"} Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.688763 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5584bebe-bdbc-4d41-bde2-35da460fab21-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5584bebe-bdbc-4d41-bde2-35da460fab21\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.688840 
4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5584bebe-bdbc-4d41-bde2-35da460fab21-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5584bebe-bdbc-4d41-bde2-35da460fab21\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.790013 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5584bebe-bdbc-4d41-bde2-35da460fab21-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5584bebe-bdbc-4d41-bde2-35da460fab21\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.790129 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5584bebe-bdbc-4d41-bde2-35da460fab21-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5584bebe-bdbc-4d41-bde2-35da460fab21\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.790235 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5584bebe-bdbc-4d41-bde2-35da460fab21-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5584bebe-bdbc-4d41-bde2-35da460fab21\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.813927 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5584bebe-bdbc-4d41-bde2-35da460fab21-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5584bebe-bdbc-4d41-bde2-35da460fab21\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.992457 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.992616 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.995149 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 12:11:47 crc kubenswrapper[4711]: I1205 12:11:47.995247 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.004380 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.008054 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.093714 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.093785 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.096050 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.104765 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.107121 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.118235 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.119071 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.200248 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.262548 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.269744 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.301102 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.301181 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.367100 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 12:11:48 crc kubenswrapper[4711]: W1205 12:11:48.557547 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-418352d43811f3e9e08df1d18f2bbbc70e09c839a66b25beb2b9545c9ef94dc9 WatchSource:0}: Error finding container 418352d43811f3e9e08df1d18f2bbbc70e09c839a66b25beb2b9545c9ef94dc9: Status 404 returned error can't find the container with id 418352d43811f3e9e08df1d18f2bbbc70e09c839a66b25beb2b9545c9ef94dc9 Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.670892 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"418352d43811f3e9e08df1d18f2bbbc70e09c839a66b25beb2b9545c9ef94dc9"} Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.676806 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"5584bebe-bdbc-4d41-bde2-35da460fab21","Type":"ContainerStarted","Data":"f2f6c9a15aa08d2306d31eefb640a00f4b1bee06b8c61f5ac3a1d9963e297ad8"} Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.702887 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrks9" event={"ID":"be44c776-1254-4da5-8bb4-e5b4c552a26f","Type":"ContainerStarted","Data":"7f1b0ed993c58515c15fd7122c081937e9343b88555125ff37d997420c2beae7"} Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.713595 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfx27" event={"ID":"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72","Type":"ContainerStarted","Data":"d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373"} Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 12:11:48.796436 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qrks9" podStartSLOduration=2.984371977 podStartE2EDuration="40.79641098s" podCreationTimestamp="2025-12-05 12:11:08 +0000 UTC" firstStartedPulling="2025-12-05 12:11:10.242669657 +0000 UTC m=+115.826991977" lastFinishedPulling="2025-12-05 12:11:48.05470865 +0000 UTC m=+153.639030980" observedRunningTime="2025-12-05 12:11:48.793343195 +0000 UTC m=+154.377665525" watchObservedRunningTime="2025-12-05 12:11:48.79641098 +0000 UTC m=+154.380733310" Dec 05 12:11:48 crc kubenswrapper[4711]: I1205 
12:11:48.798728 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lfx27" podStartSLOduration=6.071108741 podStartE2EDuration="40.798686096s" podCreationTimestamp="2025-12-05 12:11:08 +0000 UTC" firstStartedPulling="2025-12-05 12:11:13.374310311 +0000 UTC m=+118.958632641" lastFinishedPulling="2025-12-05 12:11:48.101887666 +0000 UTC m=+153.686209996" observedRunningTime="2025-12-05 12:11:48.771366755 +0000 UTC m=+154.355689085" watchObservedRunningTime="2025-12-05 12:11:48.798686096 +0000 UTC m=+154.383008426" Dec 05 12:11:48 crc kubenswrapper[4711]: W1205 12:11:48.892727 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-89a0e065bd321cb235fcd8317c5bbbde5063965a6b8e710963e46f57ad435581 WatchSource:0}: Error finding container 89a0e065bd321cb235fcd8317c5bbbde5063965a6b8e710963e46f57ad435581: Status 404 returned error can't find the container with id 89a0e065bd321cb235fcd8317c5bbbde5063965a6b8e710963e46f57ad435581 Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.174702 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.175172 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.285192 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.285292 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.734001 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"97117ec4be391761823557e064338f7ea59b8f021f7b4487ac79309e2497dd87"} Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.736748 4711 generic.go:334] "Generic (PLEG): container finished" podID="5584bebe-bdbc-4d41-bde2-35da460fab21" containerID="3fcba3b940fbc92d38abc826a11c4b4f644048a529ad26b8ba245210e5cb0f04" exitCode=0 Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.736811 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"5584bebe-bdbc-4d41-bde2-35da460fab21","Type":"ContainerDied","Data":"3fcba3b940fbc92d38abc826a11c4b4f644048a529ad26b8ba245210e5cb0f04"} Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.739250 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"8aad66f6df3eb84ed4e448ee2a6371af53ea4f738e42308edc9e7a7dedc64178"} Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.739310 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"89a0e065bd321cb235fcd8317c5bbbde5063965a6b8e710963e46f57ad435581"} Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.740127 4711 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.742764 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"cfd5909fb0fd1db008cc738abd89d68502abf0c9cf43d33a855ffafc1debabdb"} Dec 05 12:11:49 crc kubenswrapper[4711]: I1205 12:11:49.742818 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e18a716586773c2e127e911e63994e7e3f5c875df3f10ef3bd6a5c9b536289f6"} Dec 05 12:11:50 crc kubenswrapper[4711]: I1205 12:11:50.246923 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qrks9" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="registry-server" probeResult="failure" output=< Dec 05 12:11:50 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 12:11:50 crc kubenswrapper[4711]: > Dec 05 12:11:50 crc kubenswrapper[4711]: I1205 12:11:50.331440 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lfx27" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="registry-server" probeResult="failure" output=< Dec 05 12:11:50 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 12:11:50 crc kubenswrapper[4711]: > Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.025185 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.148647 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5584bebe-bdbc-4d41-bde2-35da460fab21-kubelet-dir\") pod \"5584bebe-bdbc-4d41-bde2-35da460fab21\" (UID: \"5584bebe-bdbc-4d41-bde2-35da460fab21\") " Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.148753 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5584bebe-bdbc-4d41-bde2-35da460fab21-kube-api-access\") pod \"5584bebe-bdbc-4d41-bde2-35da460fab21\" (UID: \"5584bebe-bdbc-4d41-bde2-35da460fab21\") " Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.148788 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5584bebe-bdbc-4d41-bde2-35da460fab21-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5584bebe-bdbc-4d41-bde2-35da460fab21" (UID: "5584bebe-bdbc-4d41-bde2-35da460fab21"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.149229 4711 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5584bebe-bdbc-4d41-bde2-35da460fab21-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.155431 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5584bebe-bdbc-4d41-bde2-35da460fab21-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5584bebe-bdbc-4d41-bde2-35da460fab21" (UID: "5584bebe-bdbc-4d41-bde2-35da460fab21"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.250871 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5584bebe-bdbc-4d41-bde2-35da460fab21-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.756983 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"5584bebe-bdbc-4d41-bde2-35da460fab21","Type":"ContainerDied","Data":"f2f6c9a15aa08d2306d31eefb640a00f4b1bee06b8c61f5ac3a1d9963e297ad8"} Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.757042 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2f6c9a15aa08d2306d31eefb640a00f4b1bee06b8c61f5ac3a1d9963e297ad8" Dec 05 12:11:51 crc kubenswrapper[4711]: I1205 12:11:51.757097 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.463682 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 12:11:54 crc kubenswrapper[4711]: E1205 12:11:54.464027 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5584bebe-bdbc-4d41-bde2-35da460fab21" containerName="pruner" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.464046 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5584bebe-bdbc-4d41-bde2-35da460fab21" containerName="pruner" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.464187 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5584bebe-bdbc-4d41-bde2-35da460fab21" containerName="pruner" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.464716 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.466867 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.467272 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.476085 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.495320 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-var-lock\") pod \"installer-9-crc\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.495364 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9de3326-467b-46e7-9337-2f470d97e5b9-kube-api-access\") pod \"installer-9-crc\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.495559 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.596520 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.596593 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-var-lock\") pod \"installer-9-crc\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.596618 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9de3326-467b-46e7-9337-2f470d97e5b9-kube-api-access\") pod \"installer-9-crc\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.596689 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.596818 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-var-lock\") pod \"installer-9-crc\" (UID: 
\"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.618485 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9de3326-467b-46e7-9337-2f470d97e5b9-kube-api-access\") pod \"installer-9-crc\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.779839 4711 generic.go:334] "Generic (PLEG): container finished" podID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerID="2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5" exitCode=0 Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.779912 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6fkc" event={"ID":"c8d3db38-6cb9-472a-8c5e-94eaf39e4703","Type":"ContainerDied","Data":"2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5"} Dec 05 12:11:54 crc kubenswrapper[4711]: I1205 12:11:54.804980 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:11:55 crc kubenswrapper[4711]: I1205 12:11:55.021928 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 12:11:55 crc kubenswrapper[4711]: W1205 12:11:55.028761 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poda9de3326_467b_46e7_9337_2f470d97e5b9.slice/crio-42f6a60736c12fd00fb9f1c13e037ee667d62f8441e599dbd224909c422e10c5 WatchSource:0}: Error finding container 42f6a60736c12fd00fb9f1c13e037ee667d62f8441e599dbd224909c422e10c5: Status 404 returned error can't find the container with id 42f6a60736c12fd00fb9f1c13e037ee667d62f8441e599dbd224909c422e10c5 Dec 05 12:11:55 crc kubenswrapper[4711]: I1205 12:11:55.790375 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6fkc" event={"ID":"c8d3db38-6cb9-472a-8c5e-94eaf39e4703","Type":"ContainerStarted","Data":"5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58"} Dec 05 12:11:55 crc kubenswrapper[4711]: I1205 12:11:55.793761 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a9de3326-467b-46e7-9337-2f470d97e5b9","Type":"ContainerStarted","Data":"b6c111b3533609a8ac5f095c3d73e05d767f97680d8aa7ae5d73c4704ae60d74"} Dec 05 12:11:55 crc kubenswrapper[4711]: I1205 12:11:55.793814 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a9de3326-467b-46e7-9337-2f470d97e5b9","Type":"ContainerStarted","Data":"42f6a60736c12fd00fb9f1c13e037ee667d62f8441e599dbd224909c422e10c5"} Dec 05 12:11:55 crc kubenswrapper[4711]: I1205 12:11:55.811928 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t6fkc" podStartSLOduration=2.6853916719999997 podStartE2EDuration="49.811903342s" podCreationTimestamp="2025-12-05 12:11:06 +0000 UTC" firstStartedPulling="2025-12-05 12:11:08.085626371 +0000 UTC m=+113.669948701" lastFinishedPulling="2025-12-05 12:11:55.212138041 +0000 UTC m=+160.796460371" observedRunningTime="2025-12-05 12:11:55.810915907 +0000 UTC m=+161.395238227" watchObservedRunningTime="2025-12-05 12:11:55.811903342 +0000 UTC m=+161.396225672" Dec 05 12:11:55 crc kubenswrapper[4711]: I1205 
12:11:55.833222 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.833199764 podStartE2EDuration="1.833199764s" podCreationTimestamp="2025-12-05 12:11:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:11:55.829265727 +0000 UTC m=+161.413588057" watchObservedRunningTime="2025-12-05 12:11:55.833199764 +0000 UTC m=+161.417522084" Dec 05 12:11:56 crc kubenswrapper[4711]: I1205 12:11:56.625626 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:56 crc kubenswrapper[4711]: I1205 12:11:56.625695 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:11:56 crc kubenswrapper[4711]: I1205 12:11:56.803526 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl7dg" event={"ID":"94731d22-9a75-438c-88c1-3cc84818e7e3","Type":"ContainerStarted","Data":"2670fc0fdd5d13bbebb7d69ad160b30f2409ccbc3bf3cf69de2beaad19c12464"} Dec 05 12:11:57 crc kubenswrapper[4711]: I1205 12:11:57.671076 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-t6fkc" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="registry-server" probeResult="failure" output=< Dec 05 12:11:57 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 12:11:57 crc kubenswrapper[4711]: > Dec 05 12:11:57 crc kubenswrapper[4711]: I1205 12:11:57.815773 4711 generic.go:334] "Generic (PLEG): container finished" podID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerID="2670fc0fdd5d13bbebb7d69ad160b30f2409ccbc3bf3cf69de2beaad19c12464" exitCode=0 Dec 05 12:11:57 crc kubenswrapper[4711]: I1205 12:11:57.815874 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl7dg" event={"ID":"94731d22-9a75-438c-88c1-3cc84818e7e3","Type":"ContainerDied","Data":"2670fc0fdd5d13bbebb7d69ad160b30f2409ccbc3bf3cf69de2beaad19c12464"} Dec 05 12:11:59 crc kubenswrapper[4711]: I1205 12:11:59.347617 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:59 crc kubenswrapper[4711]: I1205 12:11:59.350587 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:11:59 crc kubenswrapper[4711]: I1205 12:11:59.402036 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:11:59 crc kubenswrapper[4711]: I1205 12:11:59.402455 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:12:00 crc kubenswrapper[4711]: I1205 12:12:00.245901 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lfx27"] Dec 05 12:12:00 crc kubenswrapper[4711]: I1205 12:12:00.832730 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lfx27" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="registry-server" containerID="cri-o://d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373" gracePeriod=2 Dec 05 12:12:02 crc kubenswrapper[4711]: 
I1205 12:12:02.592313 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.680025 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-utilities\") pod \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.680085 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9ztx\" (UniqueName: \"kubernetes.io/projected/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-kube-api-access-w9ztx\") pod \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.680117 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-catalog-content\") pod \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\" (UID: \"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72\") " Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.681222 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-utilities" (OuterVolumeSpecName: "utilities") pod "a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" (UID: "a89f8c7b-89c4-48fb-9b4f-c306be6c2f72"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.687313 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-kube-api-access-w9ztx" (OuterVolumeSpecName: "kube-api-access-w9ztx") pod "a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" (UID: "a89f8c7b-89c4-48fb-9b4f-c306be6c2f72"). InnerVolumeSpecName "kube-api-access-w9ztx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.780488 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" (UID: "a89f8c7b-89c4-48fb-9b4f-c306be6c2f72"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.781513 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.781547 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9ztx\" (UniqueName: \"kubernetes.io/projected/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-kube-api-access-w9ztx\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.781559 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.849863 4711 generic.go:334] "Generic (PLEG): container finished" podID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerID="d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373" exitCode=0 Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.849930 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfx27" event={"ID":"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72","Type":"ContainerDied","Data":"d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373"} Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.849988 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfx27" event={"ID":"a89f8c7b-89c4-48fb-9b4f-c306be6c2f72","Type":"ContainerDied","Data":"f3d6414ad00365dd2cf73a112d547dc4546882e05a34009d7ac03d5dc0de0123"} Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.850013 4711 scope.go:117] "RemoveContainer" containerID="d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.850011 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lfx27" Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.883340 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lfx27"] Dec 05 12:12:02 crc kubenswrapper[4711]: I1205 12:12:02.896210 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lfx27"] Dec 05 12:12:03 crc kubenswrapper[4711]: I1205 12:12:03.755019 4711 scope.go:117] "RemoveContainer" containerID="ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204" Dec 05 12:12:04 crc kubenswrapper[4711]: I1205 12:12:04.691518 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" path="/var/lib/kubelet/pods/a89f8c7b-89c4-48fb-9b4f-c306be6c2f72/volumes" Dec 05 12:12:05 crc kubenswrapper[4711]: I1205 12:12:05.551595 4711 scope.go:117] "RemoveContainer" containerID="9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f" Dec 05 12:12:05 crc kubenswrapper[4711]: I1205 12:12:05.589552 4711 scope.go:117] "RemoveContainer" containerID="d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373" Dec 05 12:12:05 crc kubenswrapper[4711]: E1205 12:12:05.590294 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373\": container with ID starting with d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373 not found: ID does not exist" containerID="d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373" Dec 05 12:12:05 crc kubenswrapper[4711]: I1205 12:12:05.590364 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373"} err="failed to get container status \"d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373\": rpc error: code = NotFound desc = could not find container \"d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373\": container with ID starting with d5cfe9785dcd42c3dcb0684c03392e017ef83e78297587a1bb24ba7907b9f373 not found: ID does not exist" Dec 05 12:12:05 crc kubenswrapper[4711]: I1205 12:12:05.590444 4711 scope.go:117] "RemoveContainer" containerID="ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204" Dec 05 12:12:05 crc kubenswrapper[4711]: E1205 12:12:05.590807 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204\": container with ID starting with ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204 not found: ID does not exist" containerID="ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204" Dec 05 12:12:05 crc kubenswrapper[4711]: I1205 12:12:05.590848 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204"} err="failed to get container status \"ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204\": rpc error: code = NotFound desc = could not find container \"ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204\": container with ID starting with ed344c17d6555525034179469fd4a124485981a3e4c0f304fa363a4703e26204 not found: ID does not exist" Dec 05 12:12:05 crc kubenswrapper[4711]: I1205 
12:12:05.590877 4711 scope.go:117] "RemoveContainer" containerID="9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f" Dec 05 12:12:05 crc kubenswrapper[4711]: E1205 12:12:05.591221 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f\": container with ID starting with 9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f not found: ID does not exist" containerID="9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f" Dec 05 12:12:05 crc kubenswrapper[4711]: I1205 12:12:05.591261 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f"} err="failed to get container status \"9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f\": rpc error: code = NotFound desc = could not find container \"9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f\": container with ID starting with 9a8f7838dbf245bf636535df3de071c37c332a9dc7303bb2e6890f085f274a4f not found: ID does not exist" Dec 05 12:12:06 crc kubenswrapper[4711]: I1205 12:12:06.674095 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:12:06 crc kubenswrapper[4711]: I1205 12:12:06.720120 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:12:06 crc kubenswrapper[4711]: I1205 12:12:06.881167 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl7dg" event={"ID":"94731d22-9a75-438c-88c1-3cc84818e7e3","Type":"ContainerStarted","Data":"4c7b4de4b89df55d873da254c825d15296de6f197bbcf7cc534297e523c5e935"} Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.453058 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t6fkc"] Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.896134 4711 generic.go:334] "Generic (PLEG): container finished" podID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerID="62df3976f9d74301bdd80f543381171ec8267dfda9c8ed49866fb802a0b78a1b" exitCode=0 Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.896251 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6wmqx" event={"ID":"24a27655-eff1-4912-9f6c-42f7d9e68ee3","Type":"ContainerDied","Data":"62df3976f9d74301bdd80f543381171ec8267dfda9c8ed49866fb802a0b78a1b"} Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.898526 4711 generic.go:334] "Generic (PLEG): container finished" podID="d4613689-1783-4370-9d69-a945cde59468" containerID="e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf" exitCode=0 Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.898606 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-75gn6" event={"ID":"d4613689-1783-4370-9d69-a945cde59468","Type":"ContainerDied","Data":"e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf"} Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.901868 4711 generic.go:334] "Generic (PLEG): container finished" podID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerID="03d646c62c8173162ffb73872d2fb7059b643d805f7107328303e82ed87c1e18" exitCode=0 Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.901939 4711 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5n4h8" event={"ID":"a7f95e23-8ec1-48cb-b206-123e5426d705","Type":"ContainerDied","Data":"03d646c62c8173162ffb73872d2fb7059b643d805f7107328303e82ed87c1e18"} Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.904566 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fct8z" event={"ID":"d9a036a6-710b-4d06-bdd1-05c493228cf0","Type":"ContainerStarted","Data":"30b325a2e543af65d29e78859c80460686bff4ca63ca4410c4e60e9a7d9c005b"} Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.904795 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t6fkc" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="registry-server" containerID="cri-o://5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58" gracePeriod=2 Dec 05 12:12:07 crc kubenswrapper[4711]: I1205 12:12:07.984044 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tl7dg" podStartSLOduration=5.500612881 podStartE2EDuration="1m2.984018834s" podCreationTimestamp="2025-12-05 12:11:05 +0000 UTC" firstStartedPulling="2025-12-05 12:11:08.068166613 +0000 UTC m=+113.652488943" lastFinishedPulling="2025-12-05 12:12:05.551572556 +0000 UTC m=+171.135894896" observedRunningTime="2025-12-05 12:12:07.981868951 +0000 UTC m=+173.566191311" watchObservedRunningTime="2025-12-05 12:12:07.984018834 +0000 UTC m=+173.568341164" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.283515 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.463256 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-catalog-content\") pod \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.463421 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpn7t\" (UniqueName: \"kubernetes.io/projected/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-kube-api-access-dpn7t\") pod \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.463527 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-utilities\") pod \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\" (UID: \"c8d3db38-6cb9-472a-8c5e-94eaf39e4703\") " Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.464591 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-utilities" (OuterVolumeSpecName: "utilities") pod "c8d3db38-6cb9-472a-8c5e-94eaf39e4703" (UID: "c8d3db38-6cb9-472a-8c5e-94eaf39e4703"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.475640 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-kube-api-access-dpn7t" (OuterVolumeSpecName: "kube-api-access-dpn7t") pod "c8d3db38-6cb9-472a-8c5e-94eaf39e4703" (UID: "c8d3db38-6cb9-472a-8c5e-94eaf39e4703"). InnerVolumeSpecName "kube-api-access-dpn7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.530901 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c8d3db38-6cb9-472a-8c5e-94eaf39e4703" (UID: "c8d3db38-6cb9-472a-8c5e-94eaf39e4703"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.564980 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpn7t\" (UniqueName: \"kubernetes.io/projected/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-kube-api-access-dpn7t\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.565016 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.565027 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8d3db38-6cb9-472a-8c5e-94eaf39e4703-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.919657 4711 generic.go:334] "Generic (PLEG): container finished" podID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerID="30b325a2e543af65d29e78859c80460686bff4ca63ca4410c4e60e9a7d9c005b" exitCode=0 Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.919881 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fct8z" event={"ID":"d9a036a6-710b-4d06-bdd1-05c493228cf0","Type":"ContainerDied","Data":"30b325a2e543af65d29e78859c80460686bff4ca63ca4410c4e60e9a7d9c005b"} Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.925933 4711 generic.go:334] "Generic (PLEG): container finished" podID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerID="5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58" exitCode=0 Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.925978 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6fkc" event={"ID":"c8d3db38-6cb9-472a-8c5e-94eaf39e4703","Type":"ContainerDied","Data":"5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58"} Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.926007 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6fkc" event={"ID":"c8d3db38-6cb9-472a-8c5e-94eaf39e4703","Type":"ContainerDied","Data":"786eab04d7634a4ec6f0d13b1446ede7d835436d1d5fd57b8b097f4365393136"} Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.926027 4711 scope.go:117] "RemoveContainer" containerID="5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.926176 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t6fkc" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.964122 4711 scope.go:117] "RemoveContainer" containerID="2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.987307 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t6fkc"] Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.992220 4711 scope.go:117] "RemoveContainer" containerID="2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4" Dec 05 12:12:08 crc kubenswrapper[4711]: I1205 12:12:08.994555 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t6fkc"] Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.011343 4711 scope.go:117] "RemoveContainer" containerID="5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58" Dec 05 12:12:09 crc kubenswrapper[4711]: E1205 12:12:09.011864 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58\": container with ID starting with 5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58 not found: ID does not exist" containerID="5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58" Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.011896 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58"} err="failed to get container status \"5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58\": rpc error: code = NotFound desc = could not find container \"5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58\": container with ID starting with 5bb0f45438d5727d669e7a4d49872391fe77f7ee7b069f7ba904402ae4edde58 not found: ID does not exist" Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.011920 4711 scope.go:117] "RemoveContainer" containerID="2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5" Dec 05 12:12:09 crc kubenswrapper[4711]: E1205 12:12:09.012122 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5\": container with ID starting with 2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5 not found: ID does not exist" containerID="2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5" Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.012147 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5"} err="failed to get container status \"2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5\": rpc error: code = NotFound desc = could not find container \"2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5\": container with ID starting with 2a64fad28a27451e1b7efb3eca26e4ddfa1c64fa1d8f8bc8f784998ddff130e5 not found: ID does not exist" Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.012158 4711 scope.go:117] "RemoveContainer" containerID="2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4" Dec 05 12:12:09 crc kubenswrapper[4711]: E1205 12:12:09.012330 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4\": container with ID starting with 2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4 not found: ID does not exist" containerID="2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4" Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.012351 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4"} err="failed to get container status \"2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4\": rpc error: code = NotFound desc = could not find container \"2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4\": container with ID starting with 2665f815b5f08a772fdbefce3573518c408559ff0d0740de3fd8b3aadd4e61f4 not found: ID does not exist" Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.941010 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-75gn6" event={"ID":"d4613689-1783-4370-9d69-a945cde59468","Type":"ContainerStarted","Data":"214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429"} Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.944693 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5n4h8" event={"ID":"a7f95e23-8ec1-48cb-b206-123e5426d705","Type":"ContainerStarted","Data":"a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd"} Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.947230 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fct8z" event={"ID":"d9a036a6-710b-4d06-bdd1-05c493228cf0","Type":"ContainerStarted","Data":"45c41523deca05a5d3b8f3cf9a0a732657a442e599d279692a726f17a3ad49fd"} Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.949759 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6wmqx" event={"ID":"24a27655-eff1-4912-9f6c-42f7d9e68ee3","Type":"ContainerStarted","Data":"89489ace4f6304abbada80843c0298e2a741b00080f428f94335262463c9fdf8"} Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.961574 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-75gn6" podStartSLOduration=3.406313931 podStartE2EDuration="1m1.961548035s" podCreationTimestamp="2025-12-05 12:11:08 +0000 UTC" firstStartedPulling="2025-12-05 12:11:10.339340093 +0000 UTC m=+115.923662423" lastFinishedPulling="2025-12-05 12:12:08.894574197 +0000 UTC m=+174.478896527" observedRunningTime="2025-12-05 12:12:09.95932752 +0000 UTC m=+175.543649850" watchObservedRunningTime="2025-12-05 12:12:09.961548035 +0000 UTC m=+175.545870385" Dec 05 12:12:09 crc kubenswrapper[4711]: I1205 12:12:09.984807 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fct8z" podStartSLOduration=3.635920817 podStartE2EDuration="1m4.984773084s" podCreationTimestamp="2025-12-05 12:11:05 +0000 UTC" firstStartedPulling="2025-12-05 12:11:08.0689028 +0000 UTC m=+113.653225120" lastFinishedPulling="2025-12-05 12:12:09.417755057 +0000 UTC m=+175.002077387" observedRunningTime="2025-12-05 12:12:09.981867073 +0000 UTC m=+175.566189413" watchObservedRunningTime="2025-12-05 12:12:09.984773084 +0000 UTC m=+175.569095414" Dec 05 12:12:10 crc kubenswrapper[4711]: I1205 
12:12:10.005213 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5n4h8" podStartSLOduration=3.808909184 podStartE2EDuration="1m3.005185435s" podCreationTimestamp="2025-12-05 12:11:07 +0000 UTC" firstStartedPulling="2025-12-05 12:11:09.1448263 +0000 UTC m=+114.729148630" lastFinishedPulling="2025-12-05 12:12:08.341102551 +0000 UTC m=+173.925424881" observedRunningTime="2025-12-05 12:12:10.002374986 +0000 UTC m=+175.586697316" watchObservedRunningTime="2025-12-05 12:12:10.005185435 +0000 UTC m=+175.589507765" Dec 05 12:12:10 crc kubenswrapper[4711]: I1205 12:12:10.025888 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6wmqx" podStartSLOduration=4.351100438 podStartE2EDuration="1m5.025860802s" podCreationTimestamp="2025-12-05 12:11:05 +0000 UTC" firstStartedPulling="2025-12-05 12:11:08.074243392 +0000 UTC m=+113.658565722" lastFinishedPulling="2025-12-05 12:12:08.749003756 +0000 UTC m=+174.333326086" observedRunningTime="2025-12-05 12:12:10.021603928 +0000 UTC m=+175.605926258" watchObservedRunningTime="2025-12-05 12:12:10.025860802 +0000 UTC m=+175.610183132" Dec 05 12:12:10 crc kubenswrapper[4711]: I1205 12:12:10.689744 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" path="/var/lib/kubelet/pods/c8d3db38-6cb9-472a-8c5e-94eaf39e4703/volumes" Dec 05 12:12:15 crc kubenswrapper[4711]: I1205 12:12:15.867481 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:12:15 crc kubenswrapper[4711]: I1205 12:12:15.867911 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:12:15 crc kubenswrapper[4711]: I1205 12:12:15.912239 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:12:16 crc kubenswrapper[4711]: I1205 12:12:16.023218 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:12:16 crc kubenswrapper[4711]: I1205 12:12:16.353706 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:12:16 crc kubenswrapper[4711]: I1205 12:12:16.353755 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:12:16 crc kubenswrapper[4711]: I1205 12:12:16.399795 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:12:16 crc kubenswrapper[4711]: I1205 12:12:16.629939 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:12:16 crc kubenswrapper[4711]: I1205 12:12:16.630010 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:12:16 crc kubenswrapper[4711]: I1205 12:12:16.669946 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:12:17 crc kubenswrapper[4711]: I1205 12:12:17.025333 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:12:17 crc kubenswrapper[4711]: I1205 12:12:17.026159 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.251919 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.253736 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.295760 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.300448 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.300526 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.447895 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fct8z"] Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.698992 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.699044 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:12:18 crc kubenswrapper[4711]: I1205 12:12:18.750919 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:12:19 crc kubenswrapper[4711]: I1205 12:12:19.000426 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fct8z" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerName="registry-server" containerID="cri-o://45c41523deca05a5d3b8f3cf9a0a732657a442e599d279692a726f17a3ad49fd" gracePeriod=2 Dec 05 12:12:19 crc kubenswrapper[4711]: I1205 12:12:19.056763 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:12:19 crc kubenswrapper[4711]: I1205 12:12:19.067279 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:12:20 crc kubenswrapper[4711]: I1205 12:12:20.853831 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-75gn6"] Dec 05 12:12:21 crc kubenswrapper[4711]: I1205 12:12:21.019246 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-75gn6" podUID="d4613689-1783-4370-9d69-a945cde59468" containerName="registry-server" 
containerID="cri-o://214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429" gracePeriod=2 Dec 05 12:12:21 crc kubenswrapper[4711]: I1205 12:12:21.621718 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-k4w4c"] Dec 05 12:12:21 crc kubenswrapper[4711]: I1205 12:12:21.951352 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.028480 4711 generic.go:334] "Generic (PLEG): container finished" podID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerID="45c41523deca05a5d3b8f3cf9a0a732657a442e599d279692a726f17a3ad49fd" exitCode=0 Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.028564 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fct8z" event={"ID":"d9a036a6-710b-4d06-bdd1-05c493228cf0","Type":"ContainerDied","Data":"45c41523deca05a5d3b8f3cf9a0a732657a442e599d279692a726f17a3ad49fd"} Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.030941 4711 generic.go:334] "Generic (PLEG): container finished" podID="d4613689-1783-4370-9d69-a945cde59468" containerID="214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429" exitCode=0 Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.030976 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-75gn6" event={"ID":"d4613689-1783-4370-9d69-a945cde59468","Type":"ContainerDied","Data":"214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429"} Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.031002 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-75gn6" event={"ID":"d4613689-1783-4370-9d69-a945cde59468","Type":"ContainerDied","Data":"19d1c7e1b5c68411e257cc730a40f83146b33e5c61728421a572661444177483"} Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.031027 4711 scope.go:117] "RemoveContainer" containerID="214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.031029 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-75gn6" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.054774 4711 scope.go:117] "RemoveContainer" containerID="e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.082422 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.086188 4711 scope.go:117] "RemoveContainer" containerID="610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.111680 4711 scope.go:117] "RemoveContainer" containerID="214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429" Dec 05 12:12:22 crc kubenswrapper[4711]: E1205 12:12:22.112336 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429\": container with ID starting with 214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429 not found: ID does not exist" containerID="214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.112426 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429"} err="failed to get container status \"214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429\": rpc error: code = NotFound desc = could not find container \"214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429\": container with ID starting with 214a4b02b0847918c389ace0dcf0843804b62ca3ab409422af6ddb90c2798429 not found: ID does not exist" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.112457 4711 scope.go:117] "RemoveContainer" containerID="e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.112904 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blltx\" (UniqueName: \"kubernetes.io/projected/d4613689-1783-4370-9d69-a945cde59468-kube-api-access-blltx\") pod \"d4613689-1783-4370-9d69-a945cde59468\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " Dec 05 12:12:22 crc kubenswrapper[4711]: E1205 12:12:22.112942 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf\": container with ID starting with e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf not found: ID does not exist" containerID="e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.112971 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf"} err="failed to get container status \"e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf\": rpc error: code = NotFound desc = could not find container \"e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf\": container with ID starting with e18bd0deddcff3e28aca3330e2a4b84b8efacbed8c4f1ff638814a57a1ee71cf not found: ID does not exist" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.112990 4711 scope.go:117] "RemoveContainer" containerID="610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.113046 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-catalog-content\") pod 
\"d4613689-1783-4370-9d69-a945cde59468\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.113103 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-utilities\") pod \"d4613689-1783-4370-9d69-a945cde59468\" (UID: \"d4613689-1783-4370-9d69-a945cde59468\") " Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.113134 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-944r8\" (UniqueName: \"kubernetes.io/projected/d9a036a6-710b-4d06-bdd1-05c493228cf0-kube-api-access-944r8\") pod \"d9a036a6-710b-4d06-bdd1-05c493228cf0\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " Dec 05 12:12:22 crc kubenswrapper[4711]: E1205 12:12:22.113371 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e\": container with ID starting with 610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e not found: ID does not exist" containerID="610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.113436 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e"} err="failed to get container status \"610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e\": rpc error: code = NotFound desc = could not find container \"610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e\": container with ID starting with 610e406ee2a148a324285a8cda4b7df53cf125a55bf8ebf0089f1fb8e7583b8e not found: ID does not exist" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.114131 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-utilities" (OuterVolumeSpecName: "utilities") pod "d4613689-1783-4370-9d69-a945cde59468" (UID: "d4613689-1783-4370-9d69-a945cde59468"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.119656 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4613689-1783-4370-9d69-a945cde59468-kube-api-access-blltx" (OuterVolumeSpecName: "kube-api-access-blltx") pod "d4613689-1783-4370-9d69-a945cde59468" (UID: "d4613689-1783-4370-9d69-a945cde59468"). InnerVolumeSpecName "kube-api-access-blltx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.119886 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9a036a6-710b-4d06-bdd1-05c493228cf0-kube-api-access-944r8" (OuterVolumeSpecName: "kube-api-access-944r8") pod "d9a036a6-710b-4d06-bdd1-05c493228cf0" (UID: "d9a036a6-710b-4d06-bdd1-05c493228cf0"). InnerVolumeSpecName "kube-api-access-944r8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.134192 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4613689-1783-4370-9d69-a945cde59468" (UID: "d4613689-1783-4370-9d69-a945cde59468"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.216173 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-catalog-content\") pod \"d9a036a6-710b-4d06-bdd1-05c493228cf0\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.216256 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-utilities\") pod \"d9a036a6-710b-4d06-bdd1-05c493228cf0\" (UID: \"d9a036a6-710b-4d06-bdd1-05c493228cf0\") " Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.216784 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.216810 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4613689-1783-4370-9d69-a945cde59468-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.216830 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-944r8\" (UniqueName: \"kubernetes.io/projected/d9a036a6-710b-4d06-bdd1-05c493228cf0-kube-api-access-944r8\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.216850 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blltx\" (UniqueName: \"kubernetes.io/projected/d4613689-1783-4370-9d69-a945cde59468-kube-api-access-blltx\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.217957 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-utilities" (OuterVolumeSpecName: "utilities") pod "d9a036a6-710b-4d06-bdd1-05c493228cf0" (UID: "d9a036a6-710b-4d06-bdd1-05c493228cf0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.295606 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9a036a6-710b-4d06-bdd1-05c493228cf0" (UID: "d9a036a6-710b-4d06-bdd1-05c493228cf0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.317891 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.317936 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a036a6-710b-4d06-bdd1-05c493228cf0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.363303 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-75gn6"] Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.364827 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-75gn6"] Dec 05 12:12:22 crc kubenswrapper[4711]: I1205 12:12:22.691072 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4613689-1783-4370-9d69-a945cde59468" path="/var/lib/kubelet/pods/d4613689-1783-4370-9d69-a945cde59468/volumes" Dec 05 12:12:23 crc kubenswrapper[4711]: I1205 12:12:23.041281 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fct8z" event={"ID":"d9a036a6-710b-4d06-bdd1-05c493228cf0","Type":"ContainerDied","Data":"4ee8185357af8c761c5da4f35c54e6750803bd8fefbcf19757670c6a1d19829b"} Dec 05 12:12:23 crc kubenswrapper[4711]: I1205 12:12:23.041362 4711 scope.go:117] "RemoveContainer" containerID="45c41523deca05a5d3b8f3cf9a0a732657a442e599d279692a726f17a3ad49fd" Dec 05 12:12:23 crc kubenswrapper[4711]: I1205 12:12:23.041421 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fct8z" Dec 05 12:12:23 crc kubenswrapper[4711]: I1205 12:12:23.062054 4711 scope.go:117] "RemoveContainer" containerID="30b325a2e543af65d29e78859c80460686bff4ca63ca4410c4e60e9a7d9c005b" Dec 05 12:12:23 crc kubenswrapper[4711]: I1205 12:12:23.064957 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fct8z"] Dec 05 12:12:23 crc kubenswrapper[4711]: I1205 12:12:23.069732 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fct8z"] Dec 05 12:12:23 crc kubenswrapper[4711]: I1205 12:12:23.077568 4711 scope.go:117] "RemoveContainer" containerID="9a7c14240545ae070b37df7ca60883806b13479ef650aa39abe07e895d2a8fc6" Dec 05 12:12:24 crc kubenswrapper[4711]: I1205 12:12:24.691506 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" path="/var/lib/kubelet/pods/d9a036a6-710b-4d06-bdd1-05c493228cf0/volumes" Dec 05 12:12:28 crc kubenswrapper[4711]: I1205 12:12:28.267869 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.112830 4711 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113575 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="extract-utilities" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113594 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="extract-utilities" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113609 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerName="extract-content" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113616 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerName="extract-content" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113626 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113632 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113644 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113649 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113656 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="extract-utilities" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113662 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="extract-utilities" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113675 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113681 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113690 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4613689-1783-4370-9d69-a945cde59468" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113696 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4613689-1783-4370-9d69-a945cde59468" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113706 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="extract-content" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113712 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="extract-content" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113721 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4613689-1783-4370-9d69-a945cde59468" containerName="extract-content" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113728 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4613689-1783-4370-9d69-a945cde59468" containerName="extract-content" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113738 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="extract-content" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113744 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="extract-content" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113752 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerName="extract-utilities" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113757 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerName="extract-utilities" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.113766 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4613689-1783-4370-9d69-a945cde59468" containerName="extract-utilities" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113771 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4613689-1783-4370-9d69-a945cde59468" containerName="extract-utilities" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113859 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a89f8c7b-89c4-48fb-9b4f-c306be6c2f72" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113874 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8d3db38-6cb9-472a-8c5e-94eaf39e4703" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113881 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4613689-1783-4370-9d69-a945cde59468" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.113890 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9a036a6-710b-4d06-bdd1-05c493228cf0" containerName="registry-server" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114348 4711 
kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114374 4711 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.114501 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114509 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.114521 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114527 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.114533 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114540 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.114548 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114554 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.114562 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114567 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.114577 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114582 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.114591 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114597 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114720 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114729 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-insecure-readyz" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114736 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114745 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114754 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.114981 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.115374 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.115934 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b" gracePeriod=15 Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.116010 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50" gracePeriod=15 Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.116205 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05" gracePeriod=15 Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.116176 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d" gracePeriod=15 Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.116081 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8" gracePeriod=15 Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.123678 4711 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.182626 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.284054 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.284104 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.284147 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.284291 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.284608 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.284763 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.284953 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.285045 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386596 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386667 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386729 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386748 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386775 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386793 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386812 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386831 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386801 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386863 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386939 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386897 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386940 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386919 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386967 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.386985 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: I1205 12:12:33.468199 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:12:33 crc kubenswrapper[4711]: E1205 12:12:33.492893 4711 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.38:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e50a052e7dc81 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:12:33.491729537 +0000 UTC m=+199.076051867,LastTimestamp:2025-12-05 12:12:33.491729537 +0000 UTC m=+199.076051867,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.137083 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274"} Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.137598 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e480d36ed62875cc107581fbe7993e26a4002ecdd9e725b33fc5f6ba60285efb"} Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.137994 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.141247 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.142683 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.143509 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8" exitCode=0 Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.143546 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50" exitCode=0 Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.143562 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d" exitCode=0 Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.143574 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05" exitCode=2 Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.143638 4711 scope.go:117] "RemoveContainer" containerID="dc0ad0f72faed93ba1e382d1f880b4da4fefdfa0aa3aaff57145cfe142c98391" Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.146081 4711 generic.go:334] "Generic (PLEG): container finished" podID="a9de3326-467b-46e7-9337-2f470d97e5b9" containerID="b6c111b3533609a8ac5f095c3d73e05d767f97680d8aa7ae5d73c4704ae60d74" exitCode=0 Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.146224 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a9de3326-467b-46e7-9337-2f470d97e5b9","Type":"ContainerDied","Data":"b6c111b3533609a8ac5f095c3d73e05d767f97680d8aa7ae5d73c4704ae60d74"} Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.147146 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:34 crc kubenswrapper[4711]: I1205 12:12:34.147481 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.156178 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.513085 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.514329 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.515011 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.515195 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.515607 4711 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.516176 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.516564 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.517028 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.517273 4711 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.622922 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.622988 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623016 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-kubelet-dir\") pod \"a9de3326-467b-46e7-9337-2f470d97e5b9\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623095 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623091 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623133 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9de3326-467b-46e7-9337-2f470d97e5b9-kube-api-access\") pod \"a9de3326-467b-46e7-9337-2f470d97e5b9\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623184 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623240 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a9de3326-467b-46e7-9337-2f470d97e5b9" (UID: "a9de3326-467b-46e7-9337-2f470d97e5b9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623311 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-var-lock\") pod \"a9de3326-467b-46e7-9337-2f470d97e5b9\" (UID: \"a9de3326-467b-46e7-9337-2f470d97e5b9\") " Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623305 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.623480 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-var-lock" (OuterVolumeSpecName: "var-lock") pod "a9de3326-467b-46e7-9337-2f470d97e5b9" (UID: "a9de3326-467b-46e7-9337-2f470d97e5b9"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.624073 4711 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.624105 4711 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.624123 4711 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.624561 4711 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.624588 4711 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9de3326-467b-46e7-9337-2f470d97e5b9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.632359 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9de3326-467b-46e7-9337-2f470d97e5b9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a9de3326-467b-46e7-9337-2f470d97e5b9" (UID: "a9de3326-467b-46e7-9337-2f470d97e5b9"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:12:35 crc kubenswrapper[4711]: I1205 12:12:35.725813 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9de3326-467b-46e7-9337-2f470d97e5b9-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.178470 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.179897 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b" exitCode=0 Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.180002 4711 scope.go:117] "RemoveContainer" containerID="3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.180230 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.184060 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a9de3326-467b-46e7-9337-2f470d97e5b9","Type":"ContainerDied","Data":"42f6a60736c12fd00fb9f1c13e037ee667d62f8441e599dbd224909c422e10c5"} Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.184311 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42f6a60736c12fd00fb9f1c13e037ee667d62f8441e599dbd224909c422e10c5" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.184170 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.199275 4711 scope.go:117] "RemoveContainer" containerID="b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.207528 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.207887 4711 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.208296 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.208805 4711 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.209037 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.209347 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.214557 4711 scope.go:117] "RemoveContainer" containerID="f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d" Dec 05 12:12:36 crc 
kubenswrapper[4711]: I1205 12:12:36.228671 4711 scope.go:117] "RemoveContainer" containerID="d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.249186 4711 scope.go:117] "RemoveContainer" containerID="ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.273487 4711 scope.go:117] "RemoveContainer" containerID="e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.302005 4711 scope.go:117] "RemoveContainer" containerID="3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8" Dec 05 12:12:36 crc kubenswrapper[4711]: E1205 12:12:36.302746 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\": container with ID starting with 3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8 not found: ID does not exist" containerID="3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.302817 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8"} err="failed to get container status \"3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\": rpc error: code = NotFound desc = could not find container \"3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8\": container with ID starting with 3dfc72d14f16b151b979440c96993f3de74e7065d8f3888b3fddb5dabdffa5c8 not found: ID does not exist" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.302861 4711 scope.go:117] "RemoveContainer" containerID="b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50" Dec 05 12:12:36 crc kubenswrapper[4711]: E1205 12:12:36.303674 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\": container with ID starting with b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50 not found: ID does not exist" containerID="b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.303752 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50"} err="failed to get container status \"b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\": rpc error: code = NotFound desc = could not find container \"b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50\": container with ID starting with b0588cad10366f765f35a4e407cca2f8b49bc38173277484cf2d465f108e9b50 not found: ID does not exist" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.303799 4711 scope.go:117] "RemoveContainer" containerID="f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d" Dec 05 12:12:36 crc kubenswrapper[4711]: E1205 12:12:36.304366 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\": container with ID starting with f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d not found: ID does not 
exist" containerID="f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.304433 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d"} err="failed to get container status \"f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\": rpc error: code = NotFound desc = could not find container \"f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d\": container with ID starting with f5bb5dfe6aba366d5fdf387978a8a91c30c2b95198fb6d93ef49a752d99d1c3d not found: ID does not exist" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.304469 4711 scope.go:117] "RemoveContainer" containerID="d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05" Dec 05 12:12:36 crc kubenswrapper[4711]: E1205 12:12:36.305002 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\": container with ID starting with d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05 not found: ID does not exist" containerID="d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.305025 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05"} err="failed to get container status \"d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\": rpc error: code = NotFound desc = could not find container \"d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05\": container with ID starting with d908966fa0b4feb6fd29bd4dfc3205e2a220a5e268fa5db5d82cdd0e84783f05 not found: ID does not exist" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.305057 4711 scope.go:117] "RemoveContainer" containerID="ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b" Dec 05 12:12:36 crc kubenswrapper[4711]: E1205 12:12:36.305335 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\": container with ID starting with ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b not found: ID does not exist" containerID="ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.305374 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b"} err="failed to get container status \"ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\": rpc error: code = NotFound desc = could not find container \"ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b\": container with ID starting with ac57c2582dbb95898eaf1641d697008c36ca9dc6dbcf63e11e25265fff136d7b not found: ID does not exist" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.305421 4711 scope.go:117] "RemoveContainer" containerID="e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac" Dec 05 12:12:36 crc kubenswrapper[4711]: E1205 12:12:36.305706 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\": container with ID starting with e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac not found: ID does not exist" containerID="e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.305730 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac"} err="failed to get container status \"e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\": rpc error: code = NotFound desc = could not find container \"e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac\": container with ID starting with e85b594fb3720a7d6d07907a1d9f5cc7bfc4f44b73e332ef878d033f3562d3ac not found: ID does not exist" Dec 05 12:12:36 crc kubenswrapper[4711]: I1205 12:12:36.690202 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 12:12:38 crc kubenswrapper[4711]: E1205 12:12:38.072696 4711 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.38:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e50a052e7dc81 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:12:33.491729537 +0000 UTC m=+199.076051867,LastTimestamp:2025-12-05 12:12:33.491729537 +0000 UTC m=+199.076051867,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 12:12:38 crc kubenswrapper[4711]: I1205 12:12:38.686565 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:38 crc kubenswrapper[4711]: I1205 12:12:38.686957 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:40 crc kubenswrapper[4711]: E1205 12:12:40.066712 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:40 crc kubenswrapper[4711]: E1205 12:12:40.067618 4711 controller.go:195] "Failed to update lease" err="Put 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:40 crc kubenswrapper[4711]: E1205 12:12:40.068495 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:40 crc kubenswrapper[4711]: E1205 12:12:40.068904 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:40 crc kubenswrapper[4711]: E1205 12:12:40.069243 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:40 crc kubenswrapper[4711]: I1205 12:12:40.069284 4711 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 12:12:40 crc kubenswrapper[4711]: E1205 12:12:40.069630 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="200ms" Dec 05 12:12:40 crc kubenswrapper[4711]: E1205 12:12:40.270227 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="400ms" Dec 05 12:12:40 crc kubenswrapper[4711]: E1205 12:12:40.671592 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="800ms" Dec 05 12:12:41 crc kubenswrapper[4711]: E1205 12:12:41.473092 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="1.6s" Dec 05 12:12:43 crc kubenswrapper[4711]: E1205 12:12:43.075462 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="3.2s" Dec 05 12:12:46 crc kubenswrapper[4711]: I1205 12:12:46.264299 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 12:12:46 crc kubenswrapper[4711]: I1205 12:12:46.265210 4711 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f" exitCode=1 Dec 05 12:12:46 crc kubenswrapper[4711]: I1205 12:12:46.265277 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f"} Dec 05 12:12:46 crc kubenswrapper[4711]: I1205 12:12:46.266380 4711 scope.go:117] "RemoveContainer" containerID="5f5b4a32dbf33cb0de25ccbbaabfd264b7f19d7e442c168adb2fae0f2b033c0f" Dec 05 12:12:46 crc kubenswrapper[4711]: I1205 12:12:46.267001 4711 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:46 crc kubenswrapper[4711]: I1205 12:12:46.267802 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:46 crc kubenswrapper[4711]: I1205 12:12:46.268446 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:46 crc kubenswrapper[4711]: E1205 12:12:46.281160 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.38:6443: connect: connection refused" interval="6.4s" Dec 05 12:12:46 crc kubenswrapper[4711]: I1205 12:12:46.666370 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" podUID="4dc833de-df68-4794-9093-5698c85833cc" containerName="oauth-openshift" containerID="cri-o://f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959" gracePeriod=15 Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.041890 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.043001 4711 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.043669 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.044310 4711 status_manager.go:851] "Failed to get status for pod" podUID="4dc833de-df68-4794-9093-5698c85833cc" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-k4w4c\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.044608 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192343 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-provider-selection\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192477 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-error\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192514 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-idp-0-file-data\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192547 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-trusted-ca-bundle\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192677 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-service-ca\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192735 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-serving-cert\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192773 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-ocp-branding-template\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192801 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-session\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192833 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-router-certs\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192859 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-cliconfig\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192886 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4dc833de-df68-4794-9093-5698c85833cc-audit-dir\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192913 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ltkp\" (UniqueName: \"kubernetes.io/projected/4dc833de-df68-4794-9093-5698c85833cc-kube-api-access-7ltkp\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192938 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-audit-policies\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: \"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.192978 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-login\") pod \"4dc833de-df68-4794-9093-5698c85833cc\" (UID: 
\"4dc833de-df68-4794-9093-5698c85833cc\") " Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.193015 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4dc833de-df68-4794-9093-5698c85833cc-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.193227 4711 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4dc833de-df68-4794-9093-5698c85833cc-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.193927 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.194021 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.194690 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.195041 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.211991 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.212063 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc833de-df68-4794-9093-5698c85833cc-kube-api-access-7ltkp" (OuterVolumeSpecName: "kube-api-access-7ltkp") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "kube-api-access-7ltkp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.215450 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.215590 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.216179 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.216370 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.216785 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.216983 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.217115 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "4dc833de-df68-4794-9093-5698c85833cc" (UID: "4dc833de-df68-4794-9093-5698c85833cc"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.273231 4711 generic.go:334] "Generic (PLEG): container finished" podID="4dc833de-df68-4794-9093-5698c85833cc" containerID="f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959" exitCode=0 Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.273320 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.273325 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" event={"ID":"4dc833de-df68-4794-9093-5698c85833cc","Type":"ContainerDied","Data":"f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959"} Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.273711 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" event={"ID":"4dc833de-df68-4794-9093-5698c85833cc","Type":"ContainerDied","Data":"1b4635786244777d18cb8ac8017bc6b77538ad68022c26d86a3d7de642f287ef"} Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.273739 4711 scope.go:117] "RemoveContainer" containerID="f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.274433 4711 status_manager.go:851] "Failed to get status for pod" podUID="4dc833de-df68-4794-9093-5698c85833cc" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-k4w4c\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.274737 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.275039 4711 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.275362 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.278558 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.278623 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5f00b9a1de86da1deec0189c65544181cc510bc1ae1cae49290b36fefb6ed784"} Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.279525 4711 status_manager.go:851] "Failed to get status for pod" podUID="4dc833de-df68-4794-9093-5698c85833cc" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-k4w4c\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.279835 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.280539 4711 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.281466 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.288562 4711 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.288813 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.289102 4711 status_manager.go:851] "Failed to get status for pod" podUID="4dc833de-df68-4794-9093-5698c85833cc" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-k4w4c\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.289511 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294249 4711 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294272 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294285 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294294 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294304 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294314 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ltkp\" (UniqueName: \"kubernetes.io/projected/4dc833de-df68-4794-9093-5698c85833cc-kube-api-access-7ltkp\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294323 4711 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294331 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294340 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294349 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294358 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294366 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294375 4711 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4dc833de-df68-4794-9093-5698c85833cc-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.294528 4711 scope.go:117] "RemoveContainer" containerID="f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959" Dec 05 12:12:47 crc kubenswrapper[4711]: E1205 12:12:47.295011 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959\": container with ID starting with f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959 not found: ID does not exist" containerID="f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.295060 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959"} err="failed to get container status \"f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959\": rpc error: code = NotFound desc = could not find container \"f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959\": container with ID starting with f8ff8dbcc5d85206221ab16c9129bf0ded4b29b6c836dd06f14e5b5715f11959 not found: ID does not exist" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.683155 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.684348 4711 status_manager.go:851] "Failed to get status for pod" podUID="4dc833de-df68-4794-9093-5698c85833cc" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-k4w4c\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.684713 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.685219 4711 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.685853 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.702818 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 
12:12:47.702843 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:47 crc kubenswrapper[4711]: E1205 12:12:47.703136 4711 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:47 crc kubenswrapper[4711]: I1205 12:12:47.703579 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:47 crc kubenswrapper[4711]: W1205 12:12:47.724826 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-e27937fb1091b672934b7f47001f009a8b95f4a2538fc9d2b0066a58202b78be WatchSource:0}: Error finding container e27937fb1091b672934b7f47001f009a8b95f4a2538fc9d2b0066a58202b78be: Status 404 returned error can't find the container with id e27937fb1091b672934b7f47001f009a8b95f4a2538fc9d2b0066a58202b78be Dec 05 12:12:48 crc kubenswrapper[4711]: E1205 12:12:48.074618 4711 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.38:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e50a052e7dc81 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:12:33.491729537 +0000 UTC m=+199.076051867,LastTimestamp:2025-12-05 12:12:33.491729537 +0000 UTC m=+199.076051867,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.288781 4711 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="2de24e146a5119dc485c6850378cdb4341b50ac565e13ee9831cf3f62ae1ab51" exitCode=0 Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.288871 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"2de24e146a5119dc485c6850378cdb4341b50ac565e13ee9831cf3f62ae1ab51"} Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.288959 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e27937fb1091b672934b7f47001f009a8b95f4a2538fc9d2b0066a58202b78be"} Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.289361 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:48 crc 
kubenswrapper[4711]: I1205 12:12:48.289398 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.289907 4711 status_manager.go:851] "Failed to get status for pod" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:48 crc kubenswrapper[4711]: E1205 12:12:48.289999 4711 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.290342 4711 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.290636 4711 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.291262 4711 status_manager.go:851] "Failed to get status for pod" podUID="4dc833de-df68-4794-9093-5698c85833cc" pod="openshift-authentication/oauth-openshift-558db77b4-k4w4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-k4w4c\": dial tcp 38.129.56.38:6443: connect: connection refused" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.301195 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.301268 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.301338 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.302150 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be 
restarted" Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.302219 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4" gracePeriod=600 Dec 05 12:12:48 crc kubenswrapper[4711]: I1205 12:12:48.537998 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:12:49 crc kubenswrapper[4711]: I1205 12:12:49.300224 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4" exitCode=0 Dec 05 12:12:49 crc kubenswrapper[4711]: I1205 12:12:49.300419 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4"} Dec 05 12:12:49 crc kubenswrapper[4711]: I1205 12:12:49.301082 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"c792b1a4c6f9cbaaf0f7997c6c155794129b76126c2b4c55c97bfc3b4edb1356"} Dec 05 12:12:49 crc kubenswrapper[4711]: I1205 12:12:49.310991 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5714d81e20990b2d69e4d72ecaf6b8daaeec48c7f967a28e7cbc7c17d00fbd91"} Dec 05 12:12:49 crc kubenswrapper[4711]: I1205 12:12:49.311045 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0c9ad656e8b74e6f39840aac695eed238ce26167439e24c142b7ff780f761931"} Dec 05 12:12:49 crc kubenswrapper[4711]: I1205 12:12:49.311062 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2c405ce94d82b95cd86810c3eb31592765c611a62e828a13fa634255cccfac9f"} Dec 05 12:12:49 crc kubenswrapper[4711]: I1205 12:12:49.311074 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b8f4cb87c94efa7a869a28c21e3584294643f85df982dd7b97c61dc1cc1bc412"} Dec 05 12:12:50 crc kubenswrapper[4711]: I1205 12:12:50.336523 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ae5a8407d67e61a7aef49ee7cadb3470d45b651ed66095615e6f2391e6304bd3"} Dec 05 12:12:50 crc kubenswrapper[4711]: I1205 12:12:50.336757 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:50 crc kubenswrapper[4711]: I1205 12:12:50.336864 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:50 crc kubenswrapper[4711]: I1205 
12:12:50.336896 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:52 crc kubenswrapper[4711]: I1205 12:12:52.703840 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:52 crc kubenswrapper[4711]: I1205 12:12:52.703895 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:52 crc kubenswrapper[4711]: I1205 12:12:52.710412 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:54 crc kubenswrapper[4711]: I1205 12:12:54.884604 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:12:54 crc kubenswrapper[4711]: I1205 12:12:54.890084 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:12:55 crc kubenswrapper[4711]: I1205 12:12:55.353437 4711 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:56 crc kubenswrapper[4711]: I1205 12:12:56.371259 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:56 crc kubenswrapper[4711]: I1205 12:12:56.371297 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:56 crc kubenswrapper[4711]: I1205 12:12:56.379041 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:12:56 crc kubenswrapper[4711]: I1205 12:12:56.382041 4711 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d4f8b9bf-d657-4d24-837e-14f50ef8e28b" Dec 05 12:12:57 crc kubenswrapper[4711]: I1205 12:12:57.377615 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:57 crc kubenswrapper[4711]: I1205 12:12:57.378568 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:12:58 crc kubenswrapper[4711]: I1205 12:12:58.543268 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:12:58 crc kubenswrapper[4711]: I1205 12:12:58.706276 4711 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d4f8b9bf-d657-4d24-837e-14f50ef8e28b" Dec 05 12:13:05 crc kubenswrapper[4711]: I1205 12:13:05.663200 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 12:13:05 crc kubenswrapper[4711]: I1205 12:13:05.913180 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 12:13:05 crc kubenswrapper[4711]: I1205 
12:13:05.996908 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 12:13:06 crc kubenswrapper[4711]: I1205 12:13:06.182554 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 12:13:06 crc kubenswrapper[4711]: I1205 12:13:06.979221 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.090950 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.184205 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.218601 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.270528 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.409437 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.501636 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.547199 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.631544 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.673408 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.708194 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 12:13:07 crc kubenswrapper[4711]: I1205 12:13:07.833252 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.137649 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.199355 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.307594 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.415086 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.530276 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 12:13:08 crc 
kubenswrapper[4711]: I1205 12:13:08.658087 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.697267 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.879271 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.918888 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 12:13:08 crc kubenswrapper[4711]: I1205 12:13:08.927331 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.039604 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.059788 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.138554 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.260544 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.331931 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.603344 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.618980 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.708233 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.745341 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.779692 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.800409 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.801435 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.866089 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 12:13:09 crc kubenswrapper[4711]: I1205 12:13:09.894067 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.058666 4711 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.062139 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.142841 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.216098 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.261224 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.276600 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.278652 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.279940 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.307907 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.310266 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.403000 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.426827 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.504570 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.533616 4711 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.535877 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=37.535858192 podStartE2EDuration="37.535858192s" podCreationTimestamp="2025-12-05 12:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:12:55.246752771 +0000 UTC m=+220.831075101" watchObservedRunningTime="2025-12-05 12:13:10.535858192 +0000 UTC m=+236.120180522" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.537579 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-k4w4c","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.537640 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:13:10 crc kubenswrapper[4711]: 
E1205 12:13:10.537864 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc833de-df68-4794-9093-5698c85833cc" containerName="oauth-openshift" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.537891 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc833de-df68-4794-9093-5698c85833cc" containerName="oauth-openshift" Dec 05 12:13:10 crc kubenswrapper[4711]: E1205 12:13:10.537910 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" containerName="installer" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.537919 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" containerName="installer" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.538043 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc833de-df68-4794-9093-5698c85833cc" containerName="oauth-openshift" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.538059 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9de3326-467b-46e7-9337-2f470d97e5b9" containerName="installer" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.538140 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.538163 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac6514cc-e4c8-4918-9010-e819d7bc7b32" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.538628 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.541187 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.541456 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.542146 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.543319 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.543408 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.543532 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.543653 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.543707 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.543825 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.543884 4711 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.544113 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.544352 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.544474 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.553334 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.555215 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.563021 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.567190 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=15.567162221 podStartE2EDuration="15.567162221s" podCreationTimestamp="2025-12-05 12:12:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:13:10.562611386 +0000 UTC m=+236.146933726" watchObservedRunningTime="2025-12-05 12:13:10.567162221 +0000 UTC m=+236.151484551" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.634673 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.634784 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-service-ca\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.634808 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.634899 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.634972 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-router-certs\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635029 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-audit-policies\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635092 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635141 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635189 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-session\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635217 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-login\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635240 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlxmp\" (UniqueName: \"kubernetes.io/projected/2a588bda-ac60-4890-976f-aaba9e6f9682-kube-api-access-tlxmp\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635269 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635304 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-error\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.635347 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2a588bda-ac60-4890-976f-aaba9e6f9682-audit-dir\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.692230 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dc833de-df68-4794-9093-5698c85833cc" path="/var/lib/kubelet/pods/4dc833de-df68-4794-9093-5698c85833cc/volumes" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.726063 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.735916 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.735984 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736028 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-session\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736056 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-login\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736082 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlxmp\" (UniqueName: \"kubernetes.io/projected/2a588bda-ac60-4890-976f-aaba9e6f9682-kube-api-access-tlxmp\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736111 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736145 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-error\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736186 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2a588bda-ac60-4890-976f-aaba9e6f9682-audit-dir\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736209 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736240 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-service-ca\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736260 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736282 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736322 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-router-certs\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736367 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-audit-policies\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.736540 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2a588bda-ac60-4890-976f-aaba9e6f9682-audit-dir\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.737542 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-service-ca\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.737608 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.737976 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.738336 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2a588bda-ac60-4890-976f-aaba9e6f9682-audit-policies\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.744204 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.744406 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-router-certs\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.744731 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.745543 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-error\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.745670 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.745708 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.746060 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-user-template-login\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.755764 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2a588bda-ac60-4890-976f-aaba9e6f9682-v4-0-config-system-session\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.756568 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlxmp\" (UniqueName: \"kubernetes.io/projected/2a588bda-ac60-4890-976f-aaba9e6f9682-kube-api-access-tlxmp\") pod \"oauth-openshift-5ff5db57ff-ccdqb\" (UID: \"2a588bda-ac60-4890-976f-aaba9e6f9682\") " pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.756900 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.792581 
4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.807928 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.868369 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.876247 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.886833 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 12:13:10 crc kubenswrapper[4711]: I1205 12:13:10.945092 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.016428 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.072601 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.119972 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.123432 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.123542 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.145431 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.202501 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.232576 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.236748 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.259867 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.282748 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.285261 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.322510 4711 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.425712 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.432772 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.482580 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.638093 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.751765 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.753966 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.765525 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.781586 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.788827 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.819103 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.825131 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 05 12:13:11 crc kubenswrapper[4711]: I1205 12:13:11.859925 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.032806 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.053103 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.124183 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.128216 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.134292 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.148110 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.151070 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.161151 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.191750 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.249707 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.260875 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.309988 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.321726 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.387185 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.397979 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.454531 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.548307 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.574501 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.588557 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.597170 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.665557 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.721088 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.721547 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.772694 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.775224 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.793888 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.870656 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.889130 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.902230 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 05 12:13:12 crc kubenswrapper[4711]: I1205 12:13:12.902877 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.183419 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.287854 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.310165 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.332162 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.394995 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.419814 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.458440 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.494028 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.557687 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.578779 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.639017 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.680035 4711 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.684222 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.835242 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.861241 4711 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 05 12:13:13 crc kubenswrapper[4711]: I1205 12:13:13.897702 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.043349 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.232283 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.255844 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.377753 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.417940 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.433233 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.578054 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.659086 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.720330 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.745104 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.751753 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.828918 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 05 12:13:14 crc kubenswrapper[4711]: I1205 12:13:14.865264 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.039122 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.071922 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.105058 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.115867 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.157683 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.163010 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.220060 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.240022 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.241338 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.284969 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.343449 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.448832 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.469623 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.506154 4711 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.543059 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 12:13:15 crc kubenswrapper[4711]: I1205 12:13:15.876451 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.035511 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.193597 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.331540 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.383157 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.397242 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb"]
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.528734 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.557805 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.598216 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.623208 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.624651 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb"]
Dec 05 12:13:16 crc kubenswrapper[4711]: W1205 12:13:16.635085 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a588bda_ac60_4890_976f_aaba9e6f9682.slice/crio-11d9ec86f192ade95d2834289920dbf5618905da2807b6a6b785621787d2f4ba WatchSource:0}: Error finding container 11d9ec86f192ade95d2834289920dbf5618905da2807b6a6b785621787d2f4ba: Status 404 returned error can't find the container with id 11d9ec86f192ade95d2834289920dbf5618905da2807b6a6b785621787d2f4ba
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.635102 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.685616 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.689817 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.690024 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.849897 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.877985 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.935408 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 05 12:13:16 crc kubenswrapper[4711]: I1205 12:13:16.936768 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.316073 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.329990 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.509713 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" event={"ID":"2a588bda-ac60-4890-976f-aaba9e6f9682","Type":"ContainerStarted","Data":"197fd9da0228676bcff5afb7f08c9153f256c7a11d3856732bc83fbb52f1c0f8"}
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.509788 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" event={"ID":"2a588bda-ac60-4890-976f-aaba9e6f9682","Type":"ContainerStarted","Data":"11d9ec86f192ade95d2834289920dbf5618905da2807b6a6b785621787d2f4ba"}
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.511546 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.515779 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.531288 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5ff5db57ff-ccdqb" podStartSLOduration=56.531266803 podStartE2EDuration="56.531266803s" podCreationTimestamp="2025-12-05 12:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:13:17.527611611 +0000 UTC m=+243.111933961" watchObservedRunningTime="2025-12-05 12:13:17.531266803 +0000 UTC m=+243.115589133"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.786438 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.858540 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.918095 4711 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.918468 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274" gracePeriod=5
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.935276 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.967378 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 05 12:13:17 crc kubenswrapper[4711]: I1205 12:13:17.981763 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.024856 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.025915 4711 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.040768 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.059446 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.132070 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.169775 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.224211 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.403588 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.519004 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.617492 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.731759 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.782469 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.827086 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.846487 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.882991 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.936478 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.959891 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.988505 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 05 12:13:18 crc kubenswrapper[4711]: I1205 12:13:18.997165 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.080479 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.323590 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.427869 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.481073 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.510420 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.533493 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.540279 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.604440 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.676582 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.762625 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.779311 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.867642 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.896335 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 05 12:13:19 crc kubenswrapper[4711]: I1205 12:13:19.954303 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.015295 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.160325 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.318056 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.325912 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.367946 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.453373 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.485674 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.540375 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.626320 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 05 12:13:20 crc kubenswrapper[4711]: I1205 12:13:20.756837 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 05 12:13:21 crc kubenswrapper[4711]: I1205 12:13:21.333158 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 12:13:21 crc kubenswrapper[4711]: I1205 12:13:21.475298 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 05 12:13:22 crc kubenswrapper[4711]: I1205 12:13:22.607399 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.501647 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.501979 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.542700 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.542771 4711 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274" exitCode=137
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.542828 4711 scope.go:117] "RemoveContainer" containerID="abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274"
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.542865 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.563630 4711 scope.go:117] "RemoveContainer" containerID="abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274"
Dec 05 12:13:23 crc kubenswrapper[4711]: E1205 12:13:23.564946 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274\": container with ID starting with abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274 not found: ID does not exist" containerID="abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274"
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.565021 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274"} err="failed to get container status \"abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274\": rpc error: code = NotFound desc = could not find container \"abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274\": container with ID starting with abedeadab0c1768b2025cfcceadcaa411a669b24354853cc981d164104cae274 not found: ID does not exist"
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630204 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630272 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630294 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630487 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630517 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630509 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630527 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630590 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630676 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630819 4711 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630834 4711 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630844 4711 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.630853 4711 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.643200 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:13:23 crc kubenswrapper[4711]: I1205 12:13:23.732492 4711 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:24 crc kubenswrapper[4711]: I1205 12:13:24.690158 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 05 12:13:24 crc kubenswrapper[4711]: I1205 12:13:24.691043 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 05 12:13:24 crc kubenswrapper[4711]: I1205 12:13:24.701523 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 12:13:24 crc kubenswrapper[4711]: I1205 12:13:24.701626 4711 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="2f554d59-d9d6-4a16-9f40-cb6841ffe551"
Dec 05 12:13:24 crc kubenswrapper[4711]: I1205 12:13:24.705993 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 12:13:24 crc kubenswrapper[4711]: I1205 12:13:24.706256 4711 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="2f554d59-d9d6-4a16-9f40-cb6841ffe551"
Dec 05 12:13:32 crc kubenswrapper[4711]: I1205 12:13:32.915254 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6lhz6"]
Dec 05 12:13:32 crc kubenswrapper[4711]: I1205 12:13:32.916231 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" podUID="04563888-6e73-437a-99b3-9dfa5662ff33" containerName="controller-manager" containerID="cri-o://05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5" gracePeriod=30
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.022240 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"]
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.022898 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" podUID="ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" containerName="route-controller-manager" containerID="cri-o://33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2" gracePeriod=30
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.282494 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.368771 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-proxy-ca-bundles\") pod \"04563888-6e73-437a-99b3-9dfa5662ff33\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.368838 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04563888-6e73-437a-99b3-9dfa5662ff33-serving-cert\") pod \"04563888-6e73-437a-99b3-9dfa5662ff33\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.368923 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-client-ca\") pod \"04563888-6e73-437a-99b3-9dfa5662ff33\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.368947 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm5dw\" (UniqueName: \"kubernetes.io/projected/04563888-6e73-437a-99b3-9dfa5662ff33-kube-api-access-zm5dw\") pod \"04563888-6e73-437a-99b3-9dfa5662ff33\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.369000 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-config\") pod \"04563888-6e73-437a-99b3-9dfa5662ff33\" (UID: \"04563888-6e73-437a-99b3-9dfa5662ff33\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.369986 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-client-ca" (OuterVolumeSpecName: "client-ca") pod "04563888-6e73-437a-99b3-9dfa5662ff33" (UID: "04563888-6e73-437a-99b3-9dfa5662ff33"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.370070 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-config" (OuterVolumeSpecName: "config") pod "04563888-6e73-437a-99b3-9dfa5662ff33" (UID: "04563888-6e73-437a-99b3-9dfa5662ff33"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.370065 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "04563888-6e73-437a-99b3-9dfa5662ff33" (UID: "04563888-6e73-437a-99b3-9dfa5662ff33"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.380499 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04563888-6e73-437a-99b3-9dfa5662ff33-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "04563888-6e73-437a-99b3-9dfa5662ff33" (UID: "04563888-6e73-437a-99b3-9dfa5662ff33"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.381430 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04563888-6e73-437a-99b3-9dfa5662ff33-kube-api-access-zm5dw" (OuterVolumeSpecName: "kube-api-access-zm5dw") pod "04563888-6e73-437a-99b3-9dfa5662ff33" (UID: "04563888-6e73-437a-99b3-9dfa5662ff33"). InnerVolumeSpecName "kube-api-access-zm5dw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.403629 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470048 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-config\") pod \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470102 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5r9n\" (UniqueName: \"kubernetes.io/projected/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-kube-api-access-d5r9n\") pod \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470228 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-serving-cert\") pod \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470254 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-client-ca\") pod \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\" (UID: \"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4\") "
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470552 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470570 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470579 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04563888-6e73-437a-99b3-9dfa5662ff33-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470589 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/04563888-6e73-437a-99b3-9dfa5662ff33-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.470599 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm5dw\" (UniqueName: \"kubernetes.io/projected/04563888-6e73-437a-99b3-9dfa5662ff33-kube-api-access-zm5dw\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.471206 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-client-ca" (OuterVolumeSpecName: "client-ca") pod "ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" (UID: "ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.471225 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-config" (OuterVolumeSpecName: "config") pod "ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" (UID: "ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.474265 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" (UID: "ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.474934 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-kube-api-access-d5r9n" (OuterVolumeSpecName: "kube-api-access-d5r9n") pod "ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" (UID: "ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4"). InnerVolumeSpecName "kube-api-access-d5r9n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.572298 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.572721 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.572735 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.572747 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5r9n\" (UniqueName: \"kubernetes.io/projected/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4-kube-api-access-d5r9n\") on node \"crc\" DevicePath \"\""
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.609601 4711 generic.go:334] "Generic (PLEG): container finished" podID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerID="15f2f472ca86dae38f122ec3bc11940efed779327aead770c8b5ebae6661f214" exitCode=0
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.609693 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" event={"ID":"0c96a75b-8c13-4da0-abcb-95855f1fbac5","Type":"ContainerDied","Data":"15f2f472ca86dae38f122ec3bc11940efed779327aead770c8b5ebae6661f214"}
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.610238 4711 scope.go:117] "RemoveContainer" containerID="15f2f472ca86dae38f122ec3bc11940efed779327aead770c8b5ebae6661f214"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.611232 4711 generic.go:334] "Generic (PLEG): container finished" podID="04563888-6e73-437a-99b3-9dfa5662ff33" containerID="05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5" exitCode=0
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.611262 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.611314 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" event={"ID":"04563888-6e73-437a-99b3-9dfa5662ff33","Type":"ContainerDied","Data":"05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5"}
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.611343 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6lhz6" event={"ID":"04563888-6e73-437a-99b3-9dfa5662ff33","Type":"ContainerDied","Data":"86f371e247e355176a0ffc615120baf258676ec4968063d6a2d90c08631d3425"}
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.611366 4711 scope.go:117] "RemoveContainer" containerID="05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.615384 4711 generic.go:334] "Generic (PLEG): container finished" podID="ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" containerID="33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2" exitCode=0
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.615493 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.615517 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" event={"ID":"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4","Type":"ContainerDied","Data":"33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2"}
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.615734 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf" event={"ID":"ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4","Type":"ContainerDied","Data":"f1e24b2a9949afc5cf12636d27c0caa1f12402bc14fda6ca23bb48fc320da27d"}
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.658789 4711 scope.go:117] "RemoveContainer" containerID="05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5"
Dec 05 12:13:33 crc kubenswrapper[4711]: E1205 12:13:33.661242 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5\": container with ID starting with 05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5 not found: ID does not exist" containerID="05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.661272 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5"} err="failed to get container status \"05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5\": rpc error: code = NotFound desc = could not find container \"05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5\": container with ID starting with 05865c6a890a67cf67da80920aff6e0d6a1908748e33d0f1786038d872a2a5d5 not found: ID does not exist"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.661293 4711 scope.go:117] "RemoveContainer" containerID="33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.662434 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6lhz6"]
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.670379 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6lhz6"]
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.679564 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"]
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.683174 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x86pf"]
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.694361 4711 scope.go:117] "RemoveContainer" containerID="33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2"
Dec 05 12:13:33 crc kubenswrapper[4711]: E1205 12:13:33.695929 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2\": container with ID starting with 33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2 not found: ID does not exist" containerID="33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2"
Dec 05 12:13:33 crc kubenswrapper[4711]: I1205 12:13:33.696042 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2"} err="failed to get container status \"33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2\": rpc error: code = NotFound desc = could not find container \"33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2\": container with ID starting with 33a2eecd9d1496fca43b738fa235d0af4ca9ae45f5a64c030c7f764e2c11c5f2 not found: ID does not exist"
Dec 05 12:13:34 crc kubenswrapper[4711]: I1205 12:13:34.625268 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" event={"ID":"0c96a75b-8c13-4da0-abcb-95855f1fbac5","Type":"ContainerStarted","Data":"c5c210281ec5fb1e8b34c82934e1f46ea9846998cca26bf43f88d58e23ac9ab8"}
Dec 05 12:13:34 crc kubenswrapper[4711]: I1205 12:13:34.625947 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p"
Dec 05 12:13:34 crc kubenswrapper[4711]: I1205 12:13:34.628845 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p"
Dec 05 12:13:34 crc kubenswrapper[4711]: I1205 12:13:34.693081 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04563888-6e73-437a-99b3-9dfa5662ff33" path="/var/lib/kubelet/pods/04563888-6e73-437a-99b3-9dfa5662ff33/volumes"
Dec 05 12:13:34 crc kubenswrapper[4711]: I1205 12:13:34.694833 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" path="/var/lib/kubelet/pods/ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4/volumes"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.069688 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"]
Dec 05 12:13:35 crc kubenswrapper[4711]: E1205 12:13:35.070943 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04563888-6e73-437a-99b3-9dfa5662ff33" containerName="controller-manager"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.071164 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="04563888-6e73-437a-99b3-9dfa5662ff33" containerName="controller-manager"
Dec 05 12:13:35 crc kubenswrapper[4711]: E1205 12:13:35.071467 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.071638 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 12:13:35 crc kubenswrapper[4711]: E1205 12:13:35.071760 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" containerName="route-controller-manager"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.071884 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" containerName="route-controller-manager"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.072186 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.072341 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec0efd4e-1cdf-42e0-b9c4-1ef4eb0002d4" containerName="route-controller-manager"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.072547 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="04563888-6e73-437a-99b3-9dfa5662ff33" containerName="controller-manager"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.073357 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.078974 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.079315 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.079474 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.080771 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.081004 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.081326 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.085766 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"]
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.086604 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.088729 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.090910 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"]
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.091970 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.095843 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.096100 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.098604 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.099231 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.100054 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.102848 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-proxy-ca-bundles\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.102888 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4zkp\" (UniqueName: \"kubernetes.io/projected/d6e70e6b-2633-4c3a-b905-5207e128556e-kube-api-access-j4zkp\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.102946 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6e70e6b-2633-4c3a-b905-5207e128556e-serving-cert\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.103020 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-client-ca\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.103097 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-config\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.112569 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"]
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205430 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-client-ca\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205571 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-config\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205602 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-config\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205637 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-proxy-ca-bundles\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205669 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4zkp\" (UniqueName: \"kubernetes.io/projected/d6e70e6b-2633-4c3a-b905-5207e128556e-kube-api-access-j4zkp\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205712 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2n6b\" (UniqueName: \"kubernetes.io/projected/f2975117-8f09-4aa9-ae31-911864f1bbc1-kube-api-access-c2n6b\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205748 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6e70e6b-2633-4c3a-b905-5207e128556e-serving-cert\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"
Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205781 4711 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-client-ca\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.205804 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2975117-8f09-4aa9-ae31-911864f1bbc1-serving-cert\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.206745 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-client-ca\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.206795 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-proxy-ca-bundles\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.207245 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-config\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.219270 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6e70e6b-2633-4c3a-b905-5207e128556e-serving-cert\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.225808 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4zkp\" (UniqueName: \"kubernetes.io/projected/d6e70e6b-2633-4c3a-b905-5207e128556e-kube-api-access-j4zkp\") pod \"controller-manager-6d7f5f44f-4q5hp\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.307033 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2n6b\" (UniqueName: \"kubernetes.io/projected/f2975117-8f09-4aa9-ae31-911864f1bbc1-kube-api-access-c2n6b\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.307097 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-client-ca\") pod 
\"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.307118 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2975117-8f09-4aa9-ae31-911864f1bbc1-serving-cert\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.307162 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-config\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.308224 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-client-ca\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.308489 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-config\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.310698 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2975117-8f09-4aa9-ae31-911864f1bbc1-serving-cert\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.323494 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2n6b\" (UniqueName: \"kubernetes.io/projected/f2975117-8f09-4aa9-ae31-911864f1bbc1-kube-api-access-c2n6b\") pod \"route-controller-manager-54cf9984f-4x5mc\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.407802 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.431856 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.708859 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"] Dec 05 12:13:35 crc kubenswrapper[4711]: I1205 12:13:35.773148 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"] Dec 05 12:13:35 crc kubenswrapper[4711]: W1205 12:13:35.782238 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2975117_8f09_4aa9_ae31_911864f1bbc1.slice/crio-e6de2ab6bc0a8f15bef462845c1fd95f612c23cf85ce9c82332143e73ac11d1b WatchSource:0}: Error finding container e6de2ab6bc0a8f15bef462845c1fd95f612c23cf85ce9c82332143e73ac11d1b: Status 404 returned error can't find the container with id e6de2ab6bc0a8f15bef462845c1fd95f612c23cf85ce9c82332143e73ac11d1b Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.654163 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" event={"ID":"d6e70e6b-2633-4c3a-b905-5207e128556e","Type":"ContainerStarted","Data":"63423489a248eb192ca6359f724c173c19a4b99f4ffd5eef818530264abfdfe1"} Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.654754 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.654773 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" event={"ID":"d6e70e6b-2633-4c3a-b905-5207e128556e","Type":"ContainerStarted","Data":"5cc0bad39ef3b7f394b5ca9e7f9209c7ae7e5b969d4bfd76903b6452ea2c81d6"} Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.658265 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" event={"ID":"f2975117-8f09-4aa9-ae31-911864f1bbc1","Type":"ContainerStarted","Data":"39c4963dec192b286536d008c3bf513a849587044d473299ac74ecbed102f2c2"} Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.658319 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" event={"ID":"f2975117-8f09-4aa9-ae31-911864f1bbc1","Type":"ContainerStarted","Data":"e6de2ab6bc0a8f15bef462845c1fd95f612c23cf85ce9c82332143e73ac11d1b"} Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.658708 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.662947 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.666161 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.679171 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" podStartSLOduration=3.679140205 podStartE2EDuration="3.679140205s" 
podCreationTimestamp="2025-12-05 12:13:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:13:36.677344409 +0000 UTC m=+262.261666739" watchObservedRunningTime="2025-12-05 12:13:36.679140205 +0000 UTC m=+262.263462535" Dec 05 12:13:36 crc kubenswrapper[4711]: I1205 12:13:36.705552 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" podStartSLOduration=3.705520019 podStartE2EDuration="3.705520019s" podCreationTimestamp="2025-12-05 12:13:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:13:36.69709314 +0000 UTC m=+262.281415470" watchObservedRunningTime="2025-12-05 12:13:36.705520019 +0000 UTC m=+262.289842359" Dec 05 12:13:38 crc kubenswrapper[4711]: I1205 12:13:38.255776 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 12:13:40 crc kubenswrapper[4711]: I1205 12:13:40.852583 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 12:13:41 crc kubenswrapper[4711]: I1205 12:13:41.176574 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 12:13:45 crc kubenswrapper[4711]: I1205 12:13:45.064458 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 12:13:49 crc kubenswrapper[4711]: I1205 12:13:49.868039 4711 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.222974 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"] Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.223683 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" podUID="d6e70e6b-2633-4c3a-b905-5207e128556e" containerName="controller-manager" containerID="cri-o://63423489a248eb192ca6359f724c173c19a4b99f4ffd5eef818530264abfdfe1" gracePeriod=30 Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.255441 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"] Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.255714 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" podUID="f2975117-8f09-4aa9-ae31-911864f1bbc1" containerName="route-controller-manager" containerID="cri-o://39c4963dec192b286536d008c3bf513a849587044d473299ac74ecbed102f2c2" gracePeriod=30 Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.778181 4711 generic.go:334] "Generic (PLEG): container finished" podID="f2975117-8f09-4aa9-ae31-911864f1bbc1" containerID="39c4963dec192b286536d008c3bf513a849587044d473299ac74ecbed102f2c2" exitCode=0 Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.778378 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" 
event={"ID":"f2975117-8f09-4aa9-ae31-911864f1bbc1","Type":"ContainerDied","Data":"39c4963dec192b286536d008c3bf513a849587044d473299ac74ecbed102f2c2"} Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.778734 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" event={"ID":"f2975117-8f09-4aa9-ae31-911864f1bbc1","Type":"ContainerDied","Data":"e6de2ab6bc0a8f15bef462845c1fd95f612c23cf85ce9c82332143e73ac11d1b"} Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.778756 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6de2ab6bc0a8f15bef462845c1fd95f612c23cf85ce9c82332143e73ac11d1b" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.782277 4711 generic.go:334] "Generic (PLEG): container finished" podID="d6e70e6b-2633-4c3a-b905-5207e128556e" containerID="63423489a248eb192ca6359f724c173c19a4b99f4ffd5eef818530264abfdfe1" exitCode=0 Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.782336 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" event={"ID":"d6e70e6b-2633-4c3a-b905-5207e128556e","Type":"ContainerDied","Data":"63423489a248eb192ca6359f724c173c19a4b99f4ffd5eef818530264abfdfe1"} Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.811018 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.814520 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.820111 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860128 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-config\") pod \"f2975117-8f09-4aa9-ae31-911864f1bbc1\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860180 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6e70e6b-2633-4c3a-b905-5207e128556e-serving-cert\") pod \"d6e70e6b-2633-4c3a-b905-5207e128556e\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860220 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-config\") pod \"d6e70e6b-2633-4c3a-b905-5207e128556e\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860253 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-client-ca\") pod \"f2975117-8f09-4aa9-ae31-911864f1bbc1\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860286 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4zkp\" (UniqueName: \"kubernetes.io/projected/d6e70e6b-2633-4c3a-b905-5207e128556e-kube-api-access-j4zkp\") pod \"d6e70e6b-2633-4c3a-b905-5207e128556e\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860345 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-proxy-ca-bundles\") pod \"d6e70e6b-2633-4c3a-b905-5207e128556e\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860369 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2975117-8f09-4aa9-ae31-911864f1bbc1-serving-cert\") pod \"f2975117-8f09-4aa9-ae31-911864f1bbc1\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860440 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-client-ca\") pod \"d6e70e6b-2633-4c3a-b905-5207e128556e\" (UID: \"d6e70e6b-2633-4c3a-b905-5207e128556e\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.860501 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2n6b\" (UniqueName: \"kubernetes.io/projected/f2975117-8f09-4aa9-ae31-911864f1bbc1-kube-api-access-c2n6b\") pod \"f2975117-8f09-4aa9-ae31-911864f1bbc1\" (UID: \"f2975117-8f09-4aa9-ae31-911864f1bbc1\") " Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.861349 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-config" (OuterVolumeSpecName: "config") pod "f2975117-8f09-4aa9-ae31-911864f1bbc1" (UID: 
"f2975117-8f09-4aa9-ae31-911864f1bbc1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.862347 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-config" (OuterVolumeSpecName: "config") pod "d6e70e6b-2633-4c3a-b905-5207e128556e" (UID: "d6e70e6b-2633-4c3a-b905-5207e128556e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.864455 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-client-ca" (OuterVolumeSpecName: "client-ca") pod "d6e70e6b-2633-4c3a-b905-5207e128556e" (UID: "d6e70e6b-2633-4c3a-b905-5207e128556e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.866515 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d6e70e6b-2633-4c3a-b905-5207e128556e" (UID: "d6e70e6b-2633-4c3a-b905-5207e128556e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.866747 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-client-ca" (OuterVolumeSpecName: "client-ca") pod "f2975117-8f09-4aa9-ae31-911864f1bbc1" (UID: "f2975117-8f09-4aa9-ae31-911864f1bbc1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.873622 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2975117-8f09-4aa9-ae31-911864f1bbc1-kube-api-access-c2n6b" (OuterVolumeSpecName: "kube-api-access-c2n6b") pod "f2975117-8f09-4aa9-ae31-911864f1bbc1" (UID: "f2975117-8f09-4aa9-ae31-911864f1bbc1"). InnerVolumeSpecName "kube-api-access-c2n6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.873662 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6e70e6b-2633-4c3a-b905-5207e128556e-kube-api-access-j4zkp" (OuterVolumeSpecName: "kube-api-access-j4zkp") pod "d6e70e6b-2633-4c3a-b905-5207e128556e" (UID: "d6e70e6b-2633-4c3a-b905-5207e128556e"). InnerVolumeSpecName "kube-api-access-j4zkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.873617 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6e70e6b-2633-4c3a-b905-5207e128556e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d6e70e6b-2633-4c3a-b905-5207e128556e" (UID: "d6e70e6b-2633-4c3a-b905-5207e128556e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.873625 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2975117-8f09-4aa9-ae31-911864f1bbc1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f2975117-8f09-4aa9-ae31-911864f1bbc1" (UID: "f2975117-8f09-4aa9-ae31-911864f1bbc1"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962166 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2n6b\" (UniqueName: \"kubernetes.io/projected/f2975117-8f09-4aa9-ae31-911864f1bbc1-kube-api-access-c2n6b\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962211 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962221 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6e70e6b-2633-4c3a-b905-5207e128556e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962230 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962239 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f2975117-8f09-4aa9-ae31-911864f1bbc1-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962247 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4zkp\" (UniqueName: \"kubernetes.io/projected/d6e70e6b-2633-4c3a-b905-5207e128556e-kube-api-access-j4zkp\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962254 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962262 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f2975117-8f09-4aa9-ae31-911864f1bbc1-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:53 crc kubenswrapper[4711]: I1205 12:13:53.962271 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6e70e6b-2633-4c3a-b905-5207e128556e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:13:54 crc kubenswrapper[4711]: I1205 12:13:54.790155 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc" Dec 05 12:13:54 crc kubenswrapper[4711]: I1205 12:13:54.790161 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" event={"ID":"d6e70e6b-2633-4c3a-b905-5207e128556e","Type":"ContainerDied","Data":"5cc0bad39ef3b7f394b5ca9e7f9209c7ae7e5b969d4bfd76903b6452ea2c81d6"} Dec 05 12:13:54 crc kubenswrapper[4711]: I1205 12:13:54.790193 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp" Dec 05 12:13:54 crc kubenswrapper[4711]: I1205 12:13:54.790242 4711 scope.go:117] "RemoveContainer" containerID="63423489a248eb192ca6359f724c173c19a4b99f4ffd5eef818530264abfdfe1" Dec 05 12:13:54 crc kubenswrapper[4711]: I1205 12:13:54.818443 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"] Dec 05 12:13:54 crc kubenswrapper[4711]: I1205 12:13:54.825209 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6d7f5f44f-4q5hp"] Dec 05 12:13:54 crc kubenswrapper[4711]: I1205 12:13:54.831034 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"] Dec 05 12:13:54 crc kubenswrapper[4711]: I1205 12:13:54.833948 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54cf9984f-4x5mc"] Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.007222 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.078324 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf"] Dec 05 12:13:55 crc kubenswrapper[4711]: E1205 12:13:55.078643 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6e70e6b-2633-4c3a-b905-5207e128556e" containerName="controller-manager" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.078662 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6e70e6b-2633-4c3a-b905-5207e128556e" containerName="controller-manager" Dec 05 12:13:55 crc kubenswrapper[4711]: E1205 12:13:55.078675 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2975117-8f09-4aa9-ae31-911864f1bbc1" containerName="route-controller-manager" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.078683 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2975117-8f09-4aa9-ae31-911864f1bbc1" containerName="route-controller-manager" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.078805 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6e70e6b-2633-4c3a-b905-5207e128556e" containerName="controller-manager" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.078823 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2975117-8f09-4aa9-ae31-911864f1bbc1" containerName="route-controller-manager" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.079263 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.081977 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.082135 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.082206 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs"] Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.082619 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.082623 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.083025 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.085288 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.085497 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.085926 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.087653 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.088229 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.088256 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.088411 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.088567 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.097589 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf"] Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.097841 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.107169 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs"] Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178101 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/c2cfc9d2-b6b4-4c03-bf50-8350457be378-client-ca\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178176 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-config\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178370 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxgwp\" (UniqueName: \"kubernetes.io/projected/dcba57f8-6ea8-4133-afdf-5aad285815b0-kube-api-access-xxgwp\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178451 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-client-ca\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178514 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-proxy-ca-bundles\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178537 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dv5h\" (UniqueName: \"kubernetes.io/projected/c2cfc9d2-b6b4-4c03-bf50-8350457be378-kube-api-access-5dv5h\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178555 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2cfc9d2-b6b4-4c03-bf50-8350457be378-serving-cert\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178602 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcba57f8-6ea8-4133-afdf-5aad285815b0-serving-cert\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.178654 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/c2cfc9d2-b6b4-4c03-bf50-8350457be378-config\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280575 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-proxy-ca-bundles\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280635 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2cfc9d2-b6b4-4c03-bf50-8350457be378-serving-cert\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280657 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dv5h\" (UniqueName: \"kubernetes.io/projected/c2cfc9d2-b6b4-4c03-bf50-8350457be378-kube-api-access-5dv5h\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280688 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcba57f8-6ea8-4133-afdf-5aad285815b0-serving-cert\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280725 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2cfc9d2-b6b4-4c03-bf50-8350457be378-config\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280747 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2cfc9d2-b6b4-4c03-bf50-8350457be378-client-ca\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280798 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-config\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280831 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxgwp\" (UniqueName: \"kubernetes.io/projected/dcba57f8-6ea8-4133-afdf-5aad285815b0-kube-api-access-xxgwp\") pod 
\"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.280855 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-client-ca\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.282217 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2cfc9d2-b6b4-4c03-bf50-8350457be378-client-ca\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.283464 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-client-ca\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.283526 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-proxy-ca-bundles\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.283550 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2cfc9d2-b6b4-4c03-bf50-8350457be378-config\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.283859 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcba57f8-6ea8-4133-afdf-5aad285815b0-config\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.287431 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcba57f8-6ea8-4133-afdf-5aad285815b0-serving-cert\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.287858 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2cfc9d2-b6b4-4c03-bf50-8350457be378-serving-cert\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.300125 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxgwp\" (UniqueName: \"kubernetes.io/projected/dcba57f8-6ea8-4133-afdf-5aad285815b0-kube-api-access-xxgwp\") pod \"controller-manager-66bd4d75c8-cg8zf\" (UID: \"dcba57f8-6ea8-4133-afdf-5aad285815b0\") " pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.305640 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dv5h\" (UniqueName: \"kubernetes.io/projected/c2cfc9d2-b6b4-4c03-bf50-8350457be378-kube-api-access-5dv5h\") pod \"route-controller-manager-79467cd47c-jwfjs\" (UID: \"c2cfc9d2-b6b4-4c03-bf50-8350457be378\") " pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.400091 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.418034 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.674342 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf"] Dec 05 12:13:55 crc kubenswrapper[4711]: W1205 12:13:55.681101 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddcba57f8_6ea8_4133_afdf_5aad285815b0.slice/crio-f3df4ca0e31ffa2418cf89097af543c7c349dc362d2d34459ac2b1ed0602e1c7 WatchSource:0}: Error finding container f3df4ca0e31ffa2418cf89097af543c7c349dc362d2d34459ac2b1ed0602e1c7: Status 404 returned error can't find the container with id f3df4ca0e31ffa2418cf89097af543c7c349dc362d2d34459ac2b1ed0602e1c7 Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.797510 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" event={"ID":"dcba57f8-6ea8-4133-afdf-5aad285815b0","Type":"ContainerStarted","Data":"f3df4ca0e31ffa2418cf89097af543c7c349dc362d2d34459ac2b1ed0602e1c7"} Dec 05 12:13:55 crc kubenswrapper[4711]: I1205 12:13:55.914719 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs"] Dec 05 12:13:55 crc kubenswrapper[4711]: W1205 12:13:55.923128 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2cfc9d2_b6b4_4c03_bf50_8350457be378.slice/crio-caa57dc493165ad3049a69812163e178a3176a1b7a6f22e78edb6c7ddc7ace41 WatchSource:0}: Error finding container caa57dc493165ad3049a69812163e178a3176a1b7a6f22e78edb6c7ddc7ace41: Status 404 returned error can't find the container with id caa57dc493165ad3049a69812163e178a3176a1b7a6f22e78edb6c7ddc7ace41 Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.690178 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6e70e6b-2633-4c3a-b905-5207e128556e" path="/var/lib/kubelet/pods/d6e70e6b-2633-4c3a-b905-5207e128556e/volumes" Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.691456 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2975117-8f09-4aa9-ae31-911864f1bbc1" path="/var/lib/kubelet/pods/f2975117-8f09-4aa9-ae31-911864f1bbc1/volumes" Dec 
05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.805243 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" event={"ID":"dcba57f8-6ea8-4133-afdf-5aad285815b0","Type":"ContainerStarted","Data":"479b605041334e07666eef0d77bc3bd73d2b965e3978f5e3caceb4e538413677"} Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.805421 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.806422 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" event={"ID":"c2cfc9d2-b6b4-4c03-bf50-8350457be378","Type":"ContainerStarted","Data":"1cd854584ebeb0361e18290c2b5ce60bec11622b71cbcb52239ce6121fcc0e50"} Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.806447 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" event={"ID":"c2cfc9d2-b6b4-4c03-bf50-8350457be378","Type":"ContainerStarted","Data":"caa57dc493165ad3049a69812163e178a3176a1b7a6f22e78edb6c7ddc7ace41"} Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.807577 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.810275 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.811598 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.829164 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-66bd4d75c8-cg8zf" podStartSLOduration=3.8291363609999998 podStartE2EDuration="3.829136361s" podCreationTimestamp="2025-12-05 12:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:13:56.825200503 +0000 UTC m=+282.409522843" watchObservedRunningTime="2025-12-05 12:13:56.829136361 +0000 UTC m=+282.413458681" Dec 05 12:13:56 crc kubenswrapper[4711]: I1205 12:13:56.849869 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-79467cd47c-jwfjs" podStartSLOduration=3.849844746 podStartE2EDuration="3.849844746s" podCreationTimestamp="2025-12-05 12:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:13:56.8432007 +0000 UTC m=+282.427523040" watchObservedRunningTime="2025-12-05 12:13:56.849844746 +0000 UTC m=+282.434167076" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.219693 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-5nhvg"] Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.226148 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.240684 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-5nhvg"] Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.346930 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cb54057c-5d95-430f-bb2b-5c49dfd41118-registry-certificates\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.346999 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cb54057c-5d95-430f-bb2b-5c49dfd41118-ca-trust-extracted\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.347034 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cb54057c-5d95-430f-bb2b-5c49dfd41118-installation-pull-secrets\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.347059 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jc2s\" (UniqueName: \"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-kube-api-access-7jc2s\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.347172 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.347326 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-bound-sa-token\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.347520 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb54057c-5d95-430f-bb2b-5c49dfd41118-trusted-ca\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.347722 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-registry-tls\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.384962 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.449175 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-registry-tls\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.449223 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cb54057c-5d95-430f-bb2b-5c49dfd41118-registry-certificates\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.449245 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cb54057c-5d95-430f-bb2b-5c49dfd41118-ca-trust-extracted\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.449273 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jc2s\" (UniqueName: \"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-kube-api-access-7jc2s\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.449290 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cb54057c-5d95-430f-bb2b-5c49dfd41118-installation-pull-secrets\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.449312 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-bound-sa-token\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.449342 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb54057c-5d95-430f-bb2b-5c49dfd41118-trusted-ca\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.449975 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cb54057c-5d95-430f-bb2b-5c49dfd41118-ca-trust-extracted\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.450762 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb54057c-5d95-430f-bb2b-5c49dfd41118-trusted-ca\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.451002 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cb54057c-5d95-430f-bb2b-5c49dfd41118-registry-certificates\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.456019 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cb54057c-5d95-430f-bb2b-5c49dfd41118-installation-pull-secrets\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.456589 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-registry-tls\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.470228 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-bound-sa-token\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.472229 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jc2s\" (UniqueName: \"kubernetes.io/projected/cb54057c-5d95-430f-bb2b-5c49dfd41118-kube-api-access-7jc2s\") pod \"image-registry-66df7c8f76-5nhvg\" (UID: \"cb54057c-5d95-430f-bb2b-5c49dfd41118\") " pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.556271 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:03 crc kubenswrapper[4711]: I1205 12:14:03.987195 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-5nhvg"] Dec 05 12:14:03 crc kubenswrapper[4711]: W1205 12:14:03.988545 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb54057c_5d95_430f_bb2b_5c49dfd41118.slice/crio-2713a605d87057f6c1a5bed0043273db337af017f0448112a6e47a0f349e216a WatchSource:0}: Error finding container 2713a605d87057f6c1a5bed0043273db337af017f0448112a6e47a0f349e216a: Status 404 returned error can't find the container with id 2713a605d87057f6c1a5bed0043273db337af017f0448112a6e47a0f349e216a Dec 05 12:14:04 crc kubenswrapper[4711]: I1205 12:14:04.849071 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" event={"ID":"cb54057c-5d95-430f-bb2b-5c49dfd41118","Type":"ContainerStarted","Data":"87fa2c0bade1044a952b8b72e133a046bbfff28529a1f1bc825011cf8d51b54f"} Dec 05 12:14:04 crc kubenswrapper[4711]: I1205 12:14:04.849591 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" event={"ID":"cb54057c-5d95-430f-bb2b-5c49dfd41118","Type":"ContainerStarted","Data":"2713a605d87057f6c1a5bed0043273db337af017f0448112a6e47a0f349e216a"} Dec 05 12:14:04 crc kubenswrapper[4711]: I1205 12:14:04.849648 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:04 crc kubenswrapper[4711]: I1205 12:14:04.870276 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" podStartSLOduration=1.8702499320000001 podStartE2EDuration="1.870249932s" podCreationTimestamp="2025-12-05 12:14:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:14:04.868754295 +0000 UTC m=+290.453076655" watchObservedRunningTime="2025-12-05 12:14:04.870249932 +0000 UTC m=+290.454572262" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.539471 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6wmqx"] Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.540483 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6wmqx" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerName="registry-server" containerID="cri-o://89489ace4f6304abbada80843c0298e2a741b00080f428f94335262463c9fdf8" gracePeriod=30 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.558427 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tl7dg"] Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.558876 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tl7dg" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerName="registry-server" containerID="cri-o://4c7b4de4b89df55d873da254c825d15296de6f197bbcf7cc534297e523c5e935" gracePeriod=30 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.577635 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dmv5p"] Dec 05 12:14:07 
crc kubenswrapper[4711]: I1205 12:14:07.577965 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" containerID="cri-o://c5c210281ec5fb1e8b34c82934e1f46ea9846998cca26bf43f88d58e23ac9ab8" gracePeriod=30 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.589960 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5n4h8"] Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.590376 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5n4h8" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="registry-server" containerID="cri-o://a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd" gracePeriod=30 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.596913 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2hspf"] Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.597901 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.606486 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qrks9"] Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.606809 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qrks9" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="registry-server" containerID="cri-o://7f1b0ed993c58515c15fd7122c081937e9343b88555125ff37d997420c2beae7" gracePeriod=30 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.612284 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2hspf"] Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.741304 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf774310-240f-4e72-9154-1321129d54dd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.741779 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cf774310-240f-4e72-9154-1321129d54dd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.741847 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wxkj\" (UniqueName: \"kubernetes.io/projected/cf774310-240f-4e72-9154-1321129d54dd-kube-api-access-8wxkj\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.842979 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-8wxkj\" (UniqueName: \"kubernetes.io/projected/cf774310-240f-4e72-9154-1321129d54dd-kube-api-access-8wxkj\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.843072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf774310-240f-4e72-9154-1321129d54dd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.843120 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cf774310-240f-4e72-9154-1321129d54dd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.845842 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf774310-240f-4e72-9154-1321129d54dd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.852748 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cf774310-240f-4e72-9154-1321129d54dd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.861864 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wxkj\" (UniqueName: \"kubernetes.io/projected/cf774310-240f-4e72-9154-1321129d54dd-kube-api-access-8wxkj\") pod \"marketplace-operator-79b997595-2hspf\" (UID: \"cf774310-240f-4e72-9154-1321129d54dd\") " pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.872652 4711 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-dmv5p container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.872719 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.872893 4711 generic.go:334] "Generic (PLEG): container finished" podID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerID="89489ace4f6304abbada80843c0298e2a741b00080f428f94335262463c9fdf8" exitCode=0 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.873103 4711 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-6wmqx" event={"ID":"24a27655-eff1-4912-9f6c-42f7d9e68ee3","Type":"ContainerDied","Data":"89489ace4f6304abbada80843c0298e2a741b00080f428f94335262463c9fdf8"} Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.875116 4711 generic.go:334] "Generic (PLEG): container finished" podID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerID="7f1b0ed993c58515c15fd7122c081937e9343b88555125ff37d997420c2beae7" exitCode=0 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.875178 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrks9" event={"ID":"be44c776-1254-4da5-8bb4-e5b4c552a26f","Type":"ContainerDied","Data":"7f1b0ed993c58515c15fd7122c081937e9343b88555125ff37d997420c2beae7"} Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.877100 4711 generic.go:334] "Generic (PLEG): container finished" podID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerID="a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd" exitCode=0 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.877165 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5n4h8" event={"ID":"a7f95e23-8ec1-48cb-b206-123e5426d705","Type":"ContainerDied","Data":"a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd"} Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.880107 4711 generic.go:334] "Generic (PLEG): container finished" podID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerID="4c7b4de4b89df55d873da254c825d15296de6f197bbcf7cc534297e523c5e935" exitCode=0 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.880191 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl7dg" event={"ID":"94731d22-9a75-438c-88c1-3cc84818e7e3","Type":"ContainerDied","Data":"4c7b4de4b89df55d873da254c825d15296de6f197bbcf7cc534297e523c5e935"} Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.882863 4711 generic.go:334] "Generic (PLEG): container finished" podID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerID="c5c210281ec5fb1e8b34c82934e1f46ea9846998cca26bf43f88d58e23ac9ab8" exitCode=0 Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.882915 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" event={"ID":"0c96a75b-8c13-4da0-abcb-95855f1fbac5","Type":"ContainerDied","Data":"c5c210281ec5fb1e8b34c82934e1f46ea9846998cca26bf43f88d58e23ac9ab8"} Dec 05 12:14:07 crc kubenswrapper[4711]: I1205 12:14:07.882959 4711 scope.go:117] "RemoveContainer" containerID="15f2f472ca86dae38f122ec3bc11940efed779327aead770c8b5ebae6661f214" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.021890 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.093945 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.249887 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-catalog-content\") pod \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.250494 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-utilities\") pod \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.250583 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8w2t\" (UniqueName: \"kubernetes.io/projected/24a27655-eff1-4912-9f6c-42f7d9e68ee3-kube-api-access-m8w2t\") pod \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\" (UID: \"24a27655-eff1-4912-9f6c-42f7d9e68ee3\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.252478 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-utilities" (OuterVolumeSpecName: "utilities") pod "24a27655-eff1-4912-9f6c-42f7d9e68ee3" (UID: "24a27655-eff1-4912-9f6c-42f7d9e68ee3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.263337 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24a27655-eff1-4912-9f6c-42f7d9e68ee3-kube-api-access-m8w2t" (OuterVolumeSpecName: "kube-api-access-m8w2t") pod "24a27655-eff1-4912-9f6c-42f7d9e68ee3" (UID: "24a27655-eff1-4912-9f6c-42f7d9e68ee3"). InnerVolumeSpecName "kube-api-access-m8w2t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: E1205 12:14:08.273531 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd is running failed: container process not found" containerID="a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 12:14:08 crc kubenswrapper[4711]: E1205 12:14:08.278253 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd is running failed: container process not found" containerID="a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 12:14:08 crc kubenswrapper[4711]: E1205 12:14:08.278957 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd is running failed: container process not found" containerID="a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 12:14:08 crc kubenswrapper[4711]: E1205 12:14:08.279048 4711 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-5n4h8" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="registry-server" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.330408 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "24a27655-eff1-4912-9f6c-42f7d9e68ee3" (UID: "24a27655-eff1-4912-9f6c-42f7d9e68ee3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.337485 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.340849 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.351101 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.355219 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.355268 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8w2t\" (UniqueName: \"kubernetes.io/projected/24a27655-eff1-4912-9f6c-42f7d9e68ee3-kube-api-access-m8w2t\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.355283 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a27655-eff1-4912-9f6c-42f7d9e68ee3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.366487 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456257 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-operator-metrics\") pod \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456319 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9qpn\" (UniqueName: \"kubernetes.io/projected/be44c776-1254-4da5-8bb4-e5b4c552a26f-kube-api-access-v9qpn\") pod \"be44c776-1254-4da5-8bb4-e5b4c552a26f\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456404 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-catalog-content\") pod \"be44c776-1254-4da5-8bb4-e5b4c552a26f\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456438 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbhmt\" (UniqueName: \"kubernetes.io/projected/0c96a75b-8c13-4da0-abcb-95855f1fbac5-kube-api-access-tbhmt\") pod \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456485 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-utilities\") pod \"a7f95e23-8ec1-48cb-b206-123e5426d705\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456538 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nftp\" (UniqueName: \"kubernetes.io/projected/a7f95e23-8ec1-48cb-b206-123e5426d705-kube-api-access-6nftp\") pod \"a7f95e23-8ec1-48cb-b206-123e5426d705\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456609 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-catalog-content\") pod 
\"94731d22-9a75-438c-88c1-3cc84818e7e3\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456638 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-catalog-content\") pod \"a7f95e23-8ec1-48cb-b206-123e5426d705\" (UID: \"a7f95e23-8ec1-48cb-b206-123e5426d705\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456669 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt6dw\" (UniqueName: \"kubernetes.io/projected/94731d22-9a75-438c-88c1-3cc84818e7e3-kube-api-access-tt6dw\") pod \"94731d22-9a75-438c-88c1-3cc84818e7e3\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456690 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-trusted-ca\") pod \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\" (UID: \"0c96a75b-8c13-4da0-abcb-95855f1fbac5\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456716 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-utilities\") pod \"be44c776-1254-4da5-8bb4-e5b4c552a26f\" (UID: \"be44c776-1254-4da5-8bb4-e5b4c552a26f\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.456742 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-utilities\") pod \"94731d22-9a75-438c-88c1-3cc84818e7e3\" (UID: \"94731d22-9a75-438c-88c1-3cc84818e7e3\") " Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.458123 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-utilities" (OuterVolumeSpecName: "utilities") pod "94731d22-9a75-438c-88c1-3cc84818e7e3" (UID: "94731d22-9a75-438c-88c1-3cc84818e7e3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.459759 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be44c776-1254-4da5-8bb4-e5b4c552a26f-kube-api-access-v9qpn" (OuterVolumeSpecName: "kube-api-access-v9qpn") pod "be44c776-1254-4da5-8bb4-e5b4c552a26f" (UID: "be44c776-1254-4da5-8bb4-e5b4c552a26f"). InnerVolumeSpecName "kube-api-access-v9qpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.465121 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-utilities" (OuterVolumeSpecName: "utilities") pod "a7f95e23-8ec1-48cb-b206-123e5426d705" (UID: "a7f95e23-8ec1-48cb-b206-123e5426d705"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.468544 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "0c96a75b-8c13-4da0-abcb-95855f1fbac5" (UID: "0c96a75b-8c13-4da0-abcb-95855f1fbac5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.469450 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-utilities" (OuterVolumeSpecName: "utilities") pod "be44c776-1254-4da5-8bb4-e5b4c552a26f" (UID: "be44c776-1254-4da5-8bb4-e5b4c552a26f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.472940 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "0c96a75b-8c13-4da0-abcb-95855f1fbac5" (UID: "0c96a75b-8c13-4da0-abcb-95855f1fbac5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.478075 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94731d22-9a75-438c-88c1-3cc84818e7e3-kube-api-access-tt6dw" (OuterVolumeSpecName: "kube-api-access-tt6dw") pod "94731d22-9a75-438c-88c1-3cc84818e7e3" (UID: "94731d22-9a75-438c-88c1-3cc84818e7e3"). InnerVolumeSpecName "kube-api-access-tt6dw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.484036 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c96a75b-8c13-4da0-abcb-95855f1fbac5-kube-api-access-tbhmt" (OuterVolumeSpecName: "kube-api-access-tbhmt") pod "0c96a75b-8c13-4da0-abcb-95855f1fbac5" (UID: "0c96a75b-8c13-4da0-abcb-95855f1fbac5"). InnerVolumeSpecName "kube-api-access-tbhmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.493637 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f95e23-8ec1-48cb-b206-123e5426d705-kube-api-access-6nftp" (OuterVolumeSpecName: "kube-api-access-6nftp") pod "a7f95e23-8ec1-48cb-b206-123e5426d705" (UID: "a7f95e23-8ec1-48cb-b206-123e5426d705"). InnerVolumeSpecName "kube-api-access-6nftp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.509585 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7f95e23-8ec1-48cb-b206-123e5426d705" (UID: "a7f95e23-8ec1-48cb-b206-123e5426d705"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559164 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbhmt\" (UniqueName: \"kubernetes.io/projected/0c96a75b-8c13-4da0-abcb-95855f1fbac5-kube-api-access-tbhmt\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559222 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559243 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nftp\" (UniqueName: \"kubernetes.io/projected/a7f95e23-8ec1-48cb-b206-123e5426d705-kube-api-access-6nftp\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559256 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7f95e23-8ec1-48cb-b206-123e5426d705-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559269 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt6dw\" (UniqueName: \"kubernetes.io/projected/94731d22-9a75-438c-88c1-3cc84818e7e3-kube-api-access-tt6dw\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559284 4711 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559297 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559310 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559325 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9qpn\" (UniqueName: \"kubernetes.io/projected/be44c776-1254-4da5-8bb4-e5b4c552a26f-kube-api-access-v9qpn\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.559338 4711 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0c96a75b-8c13-4da0-abcb-95855f1fbac5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.570929 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94731d22-9a75-438c-88c1-3cc84818e7e3" (UID: "94731d22-9a75-438c-88c1-3cc84818e7e3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.618804 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "be44c776-1254-4da5-8bb4-e5b4c552a26f" (UID: "be44c776-1254-4da5-8bb4-e5b4c552a26f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.641989 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2hspf"] Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.660360 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be44c776-1254-4da5-8bb4-e5b4c552a26f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.660419 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94731d22-9a75-438c-88c1-3cc84818e7e3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.892549 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6wmqx" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.893681 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6wmqx" event={"ID":"24a27655-eff1-4912-9f6c-42f7d9e68ee3","Type":"ContainerDied","Data":"b5b21a6bb13fac7abb21065adde39b67f9c88c21a9de57fe84f7c523c8ece1f2"} Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.893742 4711 scope.go:117] "RemoveContainer" containerID="89489ace4f6304abbada80843c0298e2a741b00080f428f94335262463c9fdf8" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.896701 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" event={"ID":"cf774310-240f-4e72-9154-1321129d54dd","Type":"ContainerStarted","Data":"768262bdacf47f16877df843fee92a60adb927f4a9c7309c13a7d5aeba5bdf2b"} Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.896803 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" event={"ID":"cf774310-240f-4e72-9154-1321129d54dd","Type":"ContainerStarted","Data":"4a1dd7de0896704fa500b20fe6e9d0e6d682d24060a44f3611f186da597b8c26"} Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.897239 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.898290 4711 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2hspf container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.63:8080/healthz\": dial tcp 10.217.0.63:8080: connect: connection refused" start-of-body= Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.898347 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" podUID="cf774310-240f-4e72-9154-1321129d54dd" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.63:8080/healthz\": dial tcp 10.217.0.63:8080: connect: connection refused" Dec 05 
12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.900746 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrks9" event={"ID":"be44c776-1254-4da5-8bb4-e5b4c552a26f","Type":"ContainerDied","Data":"4b655ee0b430dad54336b6b0769d2048269e581dc1fa3d35ca1687242fe26b70"} Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.900850 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrks9" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.908755 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5n4h8" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.908770 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5n4h8" event={"ID":"a7f95e23-8ec1-48cb-b206-123e5426d705","Type":"ContainerDied","Data":"07f95c7ac46d02eb7d4a54fe32f56b7fe9f96abacf312dd98de8aceaa5a0f399"} Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.911479 4711 scope.go:117] "RemoveContainer" containerID="62df3976f9d74301bdd80f543381171ec8267dfda9c8ed49866fb802a0b78a1b" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.915373 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tl7dg" event={"ID":"94731d22-9a75-438c-88c1-3cc84818e7e3","Type":"ContainerDied","Data":"6855f94635ae61214b390b03d74fa09728e1ffc284868239dfdaf988dca96ed7"} Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.915733 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tl7dg" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.922656 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" event={"ID":"0c96a75b-8c13-4da0-abcb-95855f1fbac5","Type":"ContainerDied","Data":"809581b1d783d89cc69e48efe174c3d00813dc5b80e33dfb39c6286154b0bb47"} Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.922803 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dmv5p" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.926896 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6wmqx"] Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.930791 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6wmqx"] Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.943416 4711 scope.go:117] "RemoveContainer" containerID="100dfa4c05ee382ce23fa274fcbbe3e27e18cfcbbc2015f79d257f204d2af23d" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.952435 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" podStartSLOduration=1.9524082630000001 podStartE2EDuration="1.952408263s" podCreationTimestamp="2025-12-05 12:14:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:14:08.945662916 +0000 UTC m=+294.529985276" watchObservedRunningTime="2025-12-05 12:14:08.952408263 +0000 UTC m=+294.536730593" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.962803 4711 scope.go:117] "RemoveContainer" containerID="7f1b0ed993c58515c15fd7122c081937e9343b88555125ff37d997420c2beae7" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.970676 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qrks9"] Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.977615 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qrks9"] Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.985102 4711 scope.go:117] "RemoveContainer" containerID="ca712b7e90cabe7ab5760d401e36cfa4a94b01902cbf9b447c751fc4aa314fd2" Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.988276 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dmv5p"] Dec 05 12:14:08 crc kubenswrapper[4711]: I1205 12:14:08.998196 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dmv5p"] Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.001299 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tl7dg"] Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.005547 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tl7dg"] Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.008699 4711 scope.go:117] "RemoveContainer" containerID="49d69e782990b16ccd412814cf92092078ef32872d26b00a13a8430965bc9082" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.010185 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5n4h8"] Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.014224 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5n4h8"] Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.023537 4711 scope.go:117] "RemoveContainer" containerID="a0d5ea7f96afbd9dbcb35bc98f31e6917241ec9c53b836b2903ab5dca912a7fd" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.039637 4711 scope.go:117] "RemoveContainer" containerID="03d646c62c8173162ffb73872d2fb7059b643d805f7107328303e82ed87c1e18" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 
12:14:09.054337 4711 scope.go:117] "RemoveContainer" containerID="901e6789a483035b3ccd0ca89e568166c609534add984ccdc9dffd41df4e1019" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.069361 4711 scope.go:117] "RemoveContainer" containerID="4c7b4de4b89df55d873da254c825d15296de6f197bbcf7cc534297e523c5e935" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.085933 4711 scope.go:117] "RemoveContainer" containerID="2670fc0fdd5d13bbebb7d69ad160b30f2409ccbc3bf3cf69de2beaad19c12464" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.110106 4711 scope.go:117] "RemoveContainer" containerID="fab15334d942c5c8e154f3d25329f8e3fc4d3bc88489d3c262592b95e0745203" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.126040 4711 scope.go:117] "RemoveContainer" containerID="c5c210281ec5fb1e8b34c82934e1f46ea9846998cca26bf43f88d58e23ac9ab8" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.768553 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ks7xz"] Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.771949 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.771994 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772007 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772014 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772023 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772031 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772040 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerName="extract-utilities" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772047 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerName="extract-utilities" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772055 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772061 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772070 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerName="extract-content" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772076 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerName="extract-content" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772083 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerName="extract-utilities" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772089 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerName="extract-utilities" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772098 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="extract-utilities" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772104 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="extract-utilities" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772112 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerName="extract-content" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772118 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerName="extract-content" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772130 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="extract-content" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772137 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="extract-content" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772143 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="extract-content" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772148 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="extract-content" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772156 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772161 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772171 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="extract-utilities" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772176 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="extract-utilities" Dec 05 12:14:09 crc kubenswrapper[4711]: E1205 12:14:09.772190 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772196 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772313 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772334 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772345 4711 
memory_manager.go:354] "RemoveStaleState removing state" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772354 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772363 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" containerName="registry-server" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.772376 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" containerName="marketplace-operator" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.773725 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.774367 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ks7xz"] Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.778873 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.890960 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92da7542-202f-4d2e-b3a1-f300483f4523-catalog-content\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.891024 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4l8g\" (UniqueName: \"kubernetes.io/projected/92da7542-202f-4d2e-b3a1-f300483f4523-kube-api-access-c4l8g\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.891048 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92da7542-202f-4d2e-b3a1-f300483f4523-utilities\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.937117 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-2hspf" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.960365 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-72h44"] Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.967821 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.973190 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.978789 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-72h44"] Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.992539 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92da7542-202f-4d2e-b3a1-f300483f4523-catalog-content\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.992628 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4l8g\" (UniqueName: \"kubernetes.io/projected/92da7542-202f-4d2e-b3a1-f300483f4523-kube-api-access-c4l8g\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.992657 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92da7542-202f-4d2e-b3a1-f300483f4523-utilities\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.993246 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92da7542-202f-4d2e-b3a1-f300483f4523-catalog-content\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:09 crc kubenswrapper[4711]: I1205 12:14:09.993266 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92da7542-202f-4d2e-b3a1-f300483f4523-utilities\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.017707 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4l8g\" (UniqueName: \"kubernetes.io/projected/92da7542-202f-4d2e-b3a1-f300483f4523-kube-api-access-c4l8g\") pod \"redhat-marketplace-ks7xz\" (UID: \"92da7542-202f-4d2e-b3a1-f300483f4523\") " pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.093496 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2a49cc0-5d95-4923-a909-0bf5988aed8d-utilities\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.093582 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2a49cc0-5d95-4923-a909-0bf5988aed8d-catalog-content\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " 
pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.093904 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c49w5\" (UniqueName: \"kubernetes.io/projected/b2a49cc0-5d95-4923-a909-0bf5988aed8d-kube-api-access-c49w5\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.103354 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.195326 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c49w5\" (UniqueName: \"kubernetes.io/projected/b2a49cc0-5d95-4923-a909-0bf5988aed8d-kube-api-access-c49w5\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.195418 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2a49cc0-5d95-4923-a909-0bf5988aed8d-utilities\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.195446 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2a49cc0-5d95-4923-a909-0bf5988aed8d-catalog-content\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.195981 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2a49cc0-5d95-4923-a909-0bf5988aed8d-utilities\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.196071 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2a49cc0-5d95-4923-a909-0bf5988aed8d-catalog-content\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.215417 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c49w5\" (UniqueName: \"kubernetes.io/projected/b2a49cc0-5d95-4923-a909-0bf5988aed8d-kube-api-access-c49w5\") pod \"redhat-operators-72h44\" (UID: \"b2a49cc0-5d95-4923-a909-0bf5988aed8d\") " pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.288968 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.514780 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ks7xz"] Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.694106 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c96a75b-8c13-4da0-abcb-95855f1fbac5" path="/var/lib/kubelet/pods/0c96a75b-8c13-4da0-abcb-95855f1fbac5/volumes" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.694830 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24a27655-eff1-4912-9f6c-42f7d9e68ee3" path="/var/lib/kubelet/pods/24a27655-eff1-4912-9f6c-42f7d9e68ee3/volumes" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.695705 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94731d22-9a75-438c-88c1-3cc84818e7e3" path="/var/lib/kubelet/pods/94731d22-9a75-438c-88c1-3cc84818e7e3/volumes" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.697051 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7f95e23-8ec1-48cb-b206-123e5426d705" path="/var/lib/kubelet/pods/a7f95e23-8ec1-48cb-b206-123e5426d705/volumes" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.697725 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be44c776-1254-4da5-8bb4-e5b4c552a26f" path="/var/lib/kubelet/pods/be44c776-1254-4da5-8bb4-e5b4c552a26f/volumes" Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.738533 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-72h44"] Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.941272 4711 generic.go:334] "Generic (PLEG): container finished" podID="92da7542-202f-4d2e-b3a1-f300483f4523" containerID="019853ee1282bfbf12f77233399775f7332f5abb57d14729b81350dd4a845236" exitCode=0 Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.941549 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ks7xz" event={"ID":"92da7542-202f-4d2e-b3a1-f300483f4523","Type":"ContainerDied","Data":"019853ee1282bfbf12f77233399775f7332f5abb57d14729b81350dd4a845236"} Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.941837 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ks7xz" event={"ID":"92da7542-202f-4d2e-b3a1-f300483f4523","Type":"ContainerStarted","Data":"0c46838d348bfc89e83a4aab05b68175f66d622ddcaf96b8987d3cf828a81f86"} Dec 05 12:14:10 crc kubenswrapper[4711]: I1205 12:14:10.945764 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72h44" event={"ID":"b2a49cc0-5d95-4923-a909-0bf5988aed8d","Type":"ContainerStarted","Data":"8f9ca22dfc29cfaa8e2851d562aaf7eee8caf2a7ae519931459e589e79ef6137"} Dec 05 12:14:11 crc kubenswrapper[4711]: I1205 12:14:11.955924 4711 generic.go:334] "Generic (PLEG): container finished" podID="92da7542-202f-4d2e-b3a1-f300483f4523" containerID="d81442c6165206a06edaa41867046133b5092e6142e2b8b773e33593f78bd008" exitCode=0 Dec 05 12:14:11 crc kubenswrapper[4711]: I1205 12:14:11.955983 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ks7xz" event={"ID":"92da7542-202f-4d2e-b3a1-f300483f4523","Type":"ContainerDied","Data":"d81442c6165206a06edaa41867046133b5092e6142e2b8b773e33593f78bd008"} Dec 05 12:14:11 crc kubenswrapper[4711]: I1205 12:14:11.958558 4711 generic.go:334] 
"Generic (PLEG): container finished" podID="b2a49cc0-5d95-4923-a909-0bf5988aed8d" containerID="fdc765c7a194d9a550a9c7d02cd8e7b20a809aaea2538ab577d8b238af1d14ce" exitCode=0 Dec 05 12:14:11 crc kubenswrapper[4711]: I1205 12:14:11.958614 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72h44" event={"ID":"b2a49cc0-5d95-4923-a909-0bf5988aed8d","Type":"ContainerDied","Data":"fdc765c7a194d9a550a9c7d02cd8e7b20a809aaea2538ab577d8b238af1d14ce"} Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.160254 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5wtcc"] Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.161554 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.164316 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.177245 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5wtcc"] Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.231172 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec151a45-6363-4aab-8672-d13b8be04c7d-utilities\") pod \"certified-operators-5wtcc\" (UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.231460 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kprgk\" (UniqueName: \"kubernetes.io/projected/ec151a45-6363-4aab-8672-d13b8be04c7d-kube-api-access-kprgk\") pod \"certified-operators-5wtcc\" (UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.231521 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec151a45-6363-4aab-8672-d13b8be04c7d-catalog-content\") pod \"certified-operators-5wtcc\" (UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.333035 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kprgk\" (UniqueName: \"kubernetes.io/projected/ec151a45-6363-4aab-8672-d13b8be04c7d-kube-api-access-kprgk\") pod \"certified-operators-5wtcc\" (UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.333109 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec151a45-6363-4aab-8672-d13b8be04c7d-catalog-content\") pod \"certified-operators-5wtcc\" (UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.333187 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec151a45-6363-4aab-8672-d13b8be04c7d-utilities\") pod \"certified-operators-5wtcc\" 
(UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.333857 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec151a45-6363-4aab-8672-d13b8be04c7d-utilities\") pod \"certified-operators-5wtcc\" (UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.334030 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec151a45-6363-4aab-8672-d13b8be04c7d-catalog-content\") pod \"certified-operators-5wtcc\" (UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.357011 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kprgk\" (UniqueName: \"kubernetes.io/projected/ec151a45-6363-4aab-8672-d13b8be04c7d-kube-api-access-kprgk\") pod \"certified-operators-5wtcc\" (UID: \"ec151a45-6363-4aab-8672-d13b8be04c7d\") " pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.365765 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6r649"] Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.367030 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.369634 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.374664 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6r649"] Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.481199 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.536287 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a670e5d-c2a5-4d57-84a0-727c564f7325-utilities\") pod \"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.536410 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a670e5d-c2a5-4d57-84a0-727c564f7325-catalog-content\") pod \"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.536469 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b2wd\" (UniqueName: \"kubernetes.io/projected/9a670e5d-c2a5-4d57-84a0-727c564f7325-kube-api-access-8b2wd\") pod \"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.637410 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b2wd\" (UniqueName: \"kubernetes.io/projected/9a670e5d-c2a5-4d57-84a0-727c564f7325-kube-api-access-8b2wd\") pod \"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.637504 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a670e5d-c2a5-4d57-84a0-727c564f7325-utilities\") pod \"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.637539 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a670e5d-c2a5-4d57-84a0-727c564f7325-catalog-content\") pod \"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.638181 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a670e5d-c2a5-4d57-84a0-727c564f7325-catalog-content\") pod \"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.638589 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a670e5d-c2a5-4d57-84a0-727c564f7325-utilities\") pod \"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.657876 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b2wd\" (UniqueName: \"kubernetes.io/projected/9a670e5d-c2a5-4d57-84a0-727c564f7325-kube-api-access-8b2wd\") pod 
\"community-operators-6r649\" (UID: \"9a670e5d-c2a5-4d57-84a0-727c564f7325\") " pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.706815 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.934703 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5wtcc"] Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.974428 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5wtcc" event={"ID":"ec151a45-6363-4aab-8672-d13b8be04c7d","Type":"ContainerStarted","Data":"743478a0aa7bc50c732d59eefcd1f729df88c842b0dc6fd3b34cca6e2df6ed4d"} Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.979584 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ks7xz" event={"ID":"92da7542-202f-4d2e-b3a1-f300483f4523","Type":"ContainerStarted","Data":"15b0e921d24fbaa30e34ffc62fd0d2e72203c293fb45128dc5b30cf9f5753fa0"} Dec 05 12:14:12 crc kubenswrapper[4711]: I1205 12:14:12.988056 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72h44" event={"ID":"b2a49cc0-5d95-4923-a909-0bf5988aed8d","Type":"ContainerStarted","Data":"3b5fa39d1953e81f5c2a56d05b787688ad1790ae8ee36df68c5db51714ca5784"} Dec 05 12:14:13 crc kubenswrapper[4711]: I1205 12:14:13.003491 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ks7xz" podStartSLOduration=2.557621993 podStartE2EDuration="4.003471251s" podCreationTimestamp="2025-12-05 12:14:09 +0000 UTC" firstStartedPulling="2025-12-05 12:14:10.943425975 +0000 UTC m=+296.527748305" lastFinishedPulling="2025-12-05 12:14:12.389275233 +0000 UTC m=+297.973597563" observedRunningTime="2025-12-05 12:14:13.000894007 +0000 UTC m=+298.585216337" watchObservedRunningTime="2025-12-05 12:14:13.003471251 +0000 UTC m=+298.587793581" Dec 05 12:14:13 crc kubenswrapper[4711]: I1205 12:14:13.213842 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6r649"] Dec 05 12:14:13 crc kubenswrapper[4711]: W1205 12:14:13.221400 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a670e5d_c2a5_4d57_84a0_727c564f7325.slice/crio-97be39fd58294afaa1d5ffd12cc09063736e815d5c0c04ae744113c369955355 WatchSource:0}: Error finding container 97be39fd58294afaa1d5ffd12cc09063736e815d5c0c04ae744113c369955355: Status 404 returned error can't find the container with id 97be39fd58294afaa1d5ffd12cc09063736e815d5c0c04ae744113c369955355 Dec 05 12:14:13 crc kubenswrapper[4711]: I1205 12:14:13.997433 4711 generic.go:334] "Generic (PLEG): container finished" podID="ec151a45-6363-4aab-8672-d13b8be04c7d" containerID="d962986bf39341fd07283344d0c24b851a12ebb8e2b0744118cb71eef4ee6772" exitCode=0 Dec 05 12:14:13 crc kubenswrapper[4711]: I1205 12:14:13.997535 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5wtcc" event={"ID":"ec151a45-6363-4aab-8672-d13b8be04c7d","Type":"ContainerDied","Data":"d962986bf39341fd07283344d0c24b851a12ebb8e2b0744118cb71eef4ee6772"} Dec 05 12:14:14 crc kubenswrapper[4711]: I1205 12:14:14.001333 4711 generic.go:334] "Generic (PLEG): container finished" podID="9a670e5d-c2a5-4d57-84a0-727c564f7325" 
containerID="2aa49ea62f2f31263fef6bf2b7b628771f19b282b6fa3d6a68d6147d1d6ac5bd" exitCode=0 Dec 05 12:14:14 crc kubenswrapper[4711]: I1205 12:14:14.001448 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6r649" event={"ID":"9a670e5d-c2a5-4d57-84a0-727c564f7325","Type":"ContainerDied","Data":"2aa49ea62f2f31263fef6bf2b7b628771f19b282b6fa3d6a68d6147d1d6ac5bd"} Dec 05 12:14:14 crc kubenswrapper[4711]: I1205 12:14:14.001487 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6r649" event={"ID":"9a670e5d-c2a5-4d57-84a0-727c564f7325","Type":"ContainerStarted","Data":"97be39fd58294afaa1d5ffd12cc09063736e815d5c0c04ae744113c369955355"} Dec 05 12:14:14 crc kubenswrapper[4711]: I1205 12:14:14.008417 4711 generic.go:334] "Generic (PLEG): container finished" podID="b2a49cc0-5d95-4923-a909-0bf5988aed8d" containerID="3b5fa39d1953e81f5c2a56d05b787688ad1790ae8ee36df68c5db51714ca5784" exitCode=0 Dec 05 12:14:14 crc kubenswrapper[4711]: I1205 12:14:14.010192 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72h44" event={"ID":"b2a49cc0-5d95-4923-a909-0bf5988aed8d","Type":"ContainerDied","Data":"3b5fa39d1953e81f5c2a56d05b787688ad1790ae8ee36df68c5db51714ca5784"} Dec 05 12:14:14 crc kubenswrapper[4711]: I1205 12:14:14.868735 4711 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 05 12:14:16 crc kubenswrapper[4711]: I1205 12:14:16.024017 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72h44" event={"ID":"b2a49cc0-5d95-4923-a909-0bf5988aed8d","Type":"ContainerStarted","Data":"657f1b55f8653854901ef47f801c79b9e8174be3a8ab9ca94b139ae0581c47ee"} Dec 05 12:14:16 crc kubenswrapper[4711]: I1205 12:14:16.027961 4711 generic.go:334] "Generic (PLEG): container finished" podID="ec151a45-6363-4aab-8672-d13b8be04c7d" containerID="270034631e15d15975459a988272d25f485499457da4d64c3e1dd1b0a03f66f2" exitCode=0 Dec 05 12:14:16 crc kubenswrapper[4711]: I1205 12:14:16.028073 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5wtcc" event={"ID":"ec151a45-6363-4aab-8672-d13b8be04c7d","Type":"ContainerDied","Data":"270034631e15d15975459a988272d25f485499457da4d64c3e1dd1b0a03f66f2"} Dec 05 12:14:16 crc kubenswrapper[4711]: I1205 12:14:16.031654 4711 generic.go:334] "Generic (PLEG): container finished" podID="9a670e5d-c2a5-4d57-84a0-727c564f7325" containerID="31fc0ef0144f348165c7a83afa3e652430de14d63a085d2504b9783dabe2fac7" exitCode=0 Dec 05 12:14:16 crc kubenswrapper[4711]: I1205 12:14:16.031722 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6r649" event={"ID":"9a670e5d-c2a5-4d57-84a0-727c564f7325","Type":"ContainerDied","Data":"31fc0ef0144f348165c7a83afa3e652430de14d63a085d2504b9783dabe2fac7"} Dec 05 12:14:16 crc kubenswrapper[4711]: I1205 12:14:16.053018 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-72h44" podStartSLOduration=3.641219382 podStartE2EDuration="7.052987249s" podCreationTimestamp="2025-12-05 12:14:09 +0000 UTC" firstStartedPulling="2025-12-05 12:14:11.96042944 +0000 UTC m=+297.544751790" lastFinishedPulling="2025-12-05 12:14:15.372197327 +0000 UTC m=+300.956519657" observedRunningTime="2025-12-05 12:14:16.051573074 +0000 UTC m=+301.635895404" watchObservedRunningTime="2025-12-05 
12:14:16.052987249 +0000 UTC m=+301.637309579" Dec 05 12:14:18 crc kubenswrapper[4711]: I1205 12:14:18.044936 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5wtcc" event={"ID":"ec151a45-6363-4aab-8672-d13b8be04c7d","Type":"ContainerStarted","Data":"0909062d2c8c96d66b97237d2edf233d88eda71b3e630289a44621dec52a67d4"} Dec 05 12:14:18 crc kubenswrapper[4711]: I1205 12:14:18.046867 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6r649" event={"ID":"9a670e5d-c2a5-4d57-84a0-727c564f7325","Type":"ContainerStarted","Data":"2e574e9ce43d5e284937b869bc71eb98577f52900d2baa11e5edf1a240f61885"} Dec 05 12:14:18 crc kubenswrapper[4711]: I1205 12:14:18.070427 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5wtcc" podStartSLOduration=3.593804862 podStartE2EDuration="6.070378615s" podCreationTimestamp="2025-12-05 12:14:12 +0000 UTC" firstStartedPulling="2025-12-05 12:14:13.999800133 +0000 UTC m=+299.584122463" lastFinishedPulling="2025-12-05 12:14:16.476373886 +0000 UTC m=+302.060696216" observedRunningTime="2025-12-05 12:14:18.06776513 +0000 UTC m=+303.652087490" watchObservedRunningTime="2025-12-05 12:14:18.070378615 +0000 UTC m=+303.654700945" Dec 05 12:14:18 crc kubenswrapper[4711]: I1205 12:14:18.093654 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6r649" podStartSLOduration=3.663682376 podStartE2EDuration="6.093626432s" podCreationTimestamp="2025-12-05 12:14:12 +0000 UTC" firstStartedPulling="2025-12-05 12:14:14.003528415 +0000 UTC m=+299.587850735" lastFinishedPulling="2025-12-05 12:14:16.433472461 +0000 UTC m=+302.017794791" observedRunningTime="2025-12-05 12:14:18.086937347 +0000 UTC m=+303.671259697" watchObservedRunningTime="2025-12-05 12:14:18.093626432 +0000 UTC m=+303.677948762" Dec 05 12:14:20 crc kubenswrapper[4711]: I1205 12:14:20.103770 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:20 crc kubenswrapper[4711]: I1205 12:14:20.105645 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:20 crc kubenswrapper[4711]: I1205 12:14:20.160472 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:20 crc kubenswrapper[4711]: I1205 12:14:20.289276 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:20 crc kubenswrapper[4711]: I1205 12:14:20.289748 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:21 crc kubenswrapper[4711]: I1205 12:14:21.137088 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ks7xz" Dec 05 12:14:21 crc kubenswrapper[4711]: I1205 12:14:21.329654 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-72h44" podUID="b2a49cc0-5d95-4923-a909-0bf5988aed8d" containerName="registry-server" probeResult="failure" output=< Dec 05 12:14:21 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 12:14:21 crc kubenswrapper[4711]: > Dec 05 12:14:22 crc kubenswrapper[4711]: I1205 
12:14:22.482026 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:22 crc kubenswrapper[4711]: I1205 12:14:22.482094 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:22 crc kubenswrapper[4711]: I1205 12:14:22.522307 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:22 crc kubenswrapper[4711]: I1205 12:14:22.707836 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:22 crc kubenswrapper[4711]: I1205 12:14:22.707890 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:22 crc kubenswrapper[4711]: I1205 12:14:22.748509 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:23 crc kubenswrapper[4711]: I1205 12:14:23.149325 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5wtcc" Dec 05 12:14:23 crc kubenswrapper[4711]: I1205 12:14:23.150339 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6r649" Dec 05 12:14:23 crc kubenswrapper[4711]: I1205 12:14:23.563291 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-5nhvg" Dec 05 12:14:23 crc kubenswrapper[4711]: I1205 12:14:23.624302 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m7fx2"] Dec 05 12:14:30 crc kubenswrapper[4711]: I1205 12:14:30.327949 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:30 crc kubenswrapper[4711]: I1205 12:14:30.373928 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-72h44" Dec 05 12:14:48 crc kubenswrapper[4711]: I1205 12:14:48.301268 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:14:48 crc kubenswrapper[4711]: I1205 12:14:48.302233 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:14:48 crc kubenswrapper[4711]: I1205 12:14:48.666815 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" podUID="259a4e64-67b8-417e-8948-4cc028bb728d" containerName="registry" containerID="cri-o://0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006" gracePeriod=30 Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.065514 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.092906 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-registry-tls\") pod \"259a4e64-67b8-417e-8948-4cc028bb728d\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.092985 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-bound-sa-token\") pod \"259a4e64-67b8-417e-8948-4cc028bb728d\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.093042 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/259a4e64-67b8-417e-8948-4cc028bb728d-installation-pull-secrets\") pod \"259a4e64-67b8-417e-8948-4cc028bb728d\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.093281 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"259a4e64-67b8-417e-8948-4cc028bb728d\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.093349 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-trusted-ca\") pod \"259a4e64-67b8-417e-8948-4cc028bb728d\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.093509 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-registry-certificates\") pod \"259a4e64-67b8-417e-8948-4cc028bb728d\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.093569 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stsln\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-kube-api-access-stsln\") pod \"259a4e64-67b8-417e-8948-4cc028bb728d\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.093604 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/259a4e64-67b8-417e-8948-4cc028bb728d-ca-trust-extracted\") pod \"259a4e64-67b8-417e-8948-4cc028bb728d\" (UID: \"259a4e64-67b8-417e-8948-4cc028bb728d\") " Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.095190 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "259a4e64-67b8-417e-8948-4cc028bb728d" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.096936 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "259a4e64-67b8-417e-8948-4cc028bb728d" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.117592 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/259a4e64-67b8-417e-8948-4cc028bb728d-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "259a4e64-67b8-417e-8948-4cc028bb728d" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.119783 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/259a4e64-67b8-417e-8948-4cc028bb728d-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "259a4e64-67b8-417e-8948-4cc028bb728d" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.120297 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "259a4e64-67b8-417e-8948-4cc028bb728d" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.120901 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-kube-api-access-stsln" (OuterVolumeSpecName: "kube-api-access-stsln") pod "259a4e64-67b8-417e-8948-4cc028bb728d" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d"). InnerVolumeSpecName "kube-api-access-stsln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.121139 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "259a4e64-67b8-417e-8948-4cc028bb728d" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.126516 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "259a4e64-67b8-417e-8948-4cc028bb728d" (UID: "259a4e64-67b8-417e-8948-4cc028bb728d"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.195081 4711 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.195156 4711 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.195168 4711 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/259a4e64-67b8-417e-8948-4cc028bb728d-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.195189 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.195198 4711 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/259a4e64-67b8-417e-8948-4cc028bb728d-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.195208 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stsln\" (UniqueName: \"kubernetes.io/projected/259a4e64-67b8-417e-8948-4cc028bb728d-kube-api-access-stsln\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.195219 4711 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/259a4e64-67b8-417e-8948-4cc028bb728d-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.270145 4711 generic.go:334] "Generic (PLEG): container finished" podID="259a4e64-67b8-417e-8948-4cc028bb728d" containerID="0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006" exitCode=0 Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.270222 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" event={"ID":"259a4e64-67b8-417e-8948-4cc028bb728d","Type":"ContainerDied","Data":"0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006"} Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.270238 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.270272 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m7fx2" event={"ID":"259a4e64-67b8-417e-8948-4cc028bb728d","Type":"ContainerDied","Data":"a116c2f9df17bd018bf211a6d1e28ec1cc228b4a14dd7be37e9be97e204565f1"} Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.270306 4711 scope.go:117] "RemoveContainer" containerID="0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.298262 4711 scope.go:117] "RemoveContainer" containerID="0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006" Dec 05 12:14:49 crc kubenswrapper[4711]: E1205 12:14:49.299097 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006\": container with ID starting with 0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006 not found: ID does not exist" containerID="0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.299158 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006"} err="failed to get container status \"0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006\": rpc error: code = NotFound desc = could not find container \"0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006\": container with ID starting with 0814ad06750a0eaa8e7467593c170d518a4fe7921b40a9f05824870f0b79b006 not found: ID does not exist" Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.318240 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m7fx2"] Dec 05 12:14:49 crc kubenswrapper[4711]: I1205 12:14:49.321215 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m7fx2"] Dec 05 12:14:50 crc kubenswrapper[4711]: I1205 12:14:50.698465 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="259a4e64-67b8-417e-8948-4cc028bb728d" path="/var/lib/kubelet/pods/259a4e64-67b8-417e-8948-4cc028bb728d/volumes" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.170622 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw"] Dec 05 12:15:00 crc kubenswrapper[4711]: E1205 12:15:00.171579 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="259a4e64-67b8-417e-8948-4cc028bb728d" containerName="registry" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.171596 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="259a4e64-67b8-417e-8948-4cc028bb728d" containerName="registry" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.171761 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="259a4e64-67b8-417e-8948-4cc028bb728d" containerName="registry" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.172315 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.174672 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.174871 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.181566 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw"] Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.348454 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05e59f9-7440-4bce-8767-34085269a17f-secret-volume\") pod \"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.348794 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m7wq\" (UniqueName: \"kubernetes.io/projected/c05e59f9-7440-4bce-8767-34085269a17f-kube-api-access-9m7wq\") pod \"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.348935 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05e59f9-7440-4bce-8767-34085269a17f-config-volume\") pod \"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.450664 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05e59f9-7440-4bce-8767-34085269a17f-secret-volume\") pod \"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.450737 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m7wq\" (UniqueName: \"kubernetes.io/projected/c05e59f9-7440-4bce-8767-34085269a17f-kube-api-access-9m7wq\") pod \"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.450796 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05e59f9-7440-4bce-8767-34085269a17f-config-volume\") pod \"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.451723 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05e59f9-7440-4bce-8767-34085269a17f-config-volume\") pod 
\"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.456833 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05e59f9-7440-4bce-8767-34085269a17f-secret-volume\") pod \"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.476937 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m7wq\" (UniqueName: \"kubernetes.io/projected/c05e59f9-7440-4bce-8767-34085269a17f-kube-api-access-9m7wq\") pod \"collect-profiles-29415615-b9zcw\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.493413 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:00 crc kubenswrapper[4711]: I1205 12:15:00.937947 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw"] Dec 05 12:15:00 crc kubenswrapper[4711]: W1205 12:15:00.950026 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc05e59f9_7440_4bce_8767_34085269a17f.slice/crio-b89d14f9d06f36bea53ee29facb4fb3d03ac16c77af0f71d427a6dc67e7339fe WatchSource:0}: Error finding container b89d14f9d06f36bea53ee29facb4fb3d03ac16c77af0f71d427a6dc67e7339fe: Status 404 returned error can't find the container with id b89d14f9d06f36bea53ee29facb4fb3d03ac16c77af0f71d427a6dc67e7339fe Dec 05 12:15:01 crc kubenswrapper[4711]: I1205 12:15:01.359416 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" event={"ID":"c05e59f9-7440-4bce-8767-34085269a17f","Type":"ContainerStarted","Data":"7882ec2094167fdbdcf01ed01d1559868993309656d9d9f86e571facffce244f"} Dec 05 12:15:01 crc kubenswrapper[4711]: I1205 12:15:01.359908 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" event={"ID":"c05e59f9-7440-4bce-8767-34085269a17f","Type":"ContainerStarted","Data":"b89d14f9d06f36bea53ee29facb4fb3d03ac16c77af0f71d427a6dc67e7339fe"} Dec 05 12:15:01 crc kubenswrapper[4711]: I1205 12:15:01.378699 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" podStartSLOduration=1.378677154 podStartE2EDuration="1.378677154s" podCreationTimestamp="2025-12-05 12:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:15:01.376362336 +0000 UTC m=+346.960684676" watchObservedRunningTime="2025-12-05 12:15:01.378677154 +0000 UTC m=+346.962999484" Dec 05 12:15:02 crc kubenswrapper[4711]: I1205 12:15:02.366753 4711 generic.go:334] "Generic (PLEG): container finished" podID="c05e59f9-7440-4bce-8767-34085269a17f" containerID="7882ec2094167fdbdcf01ed01d1559868993309656d9d9f86e571facffce244f" exitCode=0 Dec 05 12:15:02 crc kubenswrapper[4711]: I1205 12:15:02.366841 
4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" event={"ID":"c05e59f9-7440-4bce-8767-34085269a17f","Type":"ContainerDied","Data":"7882ec2094167fdbdcf01ed01d1559868993309656d9d9f86e571facffce244f"} Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.600661 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.791106 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05e59f9-7440-4bce-8767-34085269a17f-config-volume\") pod \"c05e59f9-7440-4bce-8767-34085269a17f\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.791422 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9m7wq\" (UniqueName: \"kubernetes.io/projected/c05e59f9-7440-4bce-8767-34085269a17f-kube-api-access-9m7wq\") pod \"c05e59f9-7440-4bce-8767-34085269a17f\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.791590 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05e59f9-7440-4bce-8767-34085269a17f-secret-volume\") pod \"c05e59f9-7440-4bce-8767-34085269a17f\" (UID: \"c05e59f9-7440-4bce-8767-34085269a17f\") " Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.792199 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c05e59f9-7440-4bce-8767-34085269a17f-config-volume" (OuterVolumeSpecName: "config-volume") pod "c05e59f9-7440-4bce-8767-34085269a17f" (UID: "c05e59f9-7440-4bce-8767-34085269a17f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.800252 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c05e59f9-7440-4bce-8767-34085269a17f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c05e59f9-7440-4bce-8767-34085269a17f" (UID: "c05e59f9-7440-4bce-8767-34085269a17f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.800269 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c05e59f9-7440-4bce-8767-34085269a17f-kube-api-access-9m7wq" (OuterVolumeSpecName: "kube-api-access-9m7wq") pod "c05e59f9-7440-4bce-8767-34085269a17f" (UID: "c05e59f9-7440-4bce-8767-34085269a17f"). InnerVolumeSpecName "kube-api-access-9m7wq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.893508 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9m7wq\" (UniqueName: \"kubernetes.io/projected/c05e59f9-7440-4bce-8767-34085269a17f-kube-api-access-9m7wq\") on node \"crc\" DevicePath \"\"" Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.893975 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05e59f9-7440-4bce-8767-34085269a17f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:15:03 crc kubenswrapper[4711]: I1205 12:15:03.894061 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05e59f9-7440-4bce-8767-34085269a17f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:15:04 crc kubenswrapper[4711]: I1205 12:15:04.378769 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" event={"ID":"c05e59f9-7440-4bce-8767-34085269a17f","Type":"ContainerDied","Data":"b89d14f9d06f36bea53ee29facb4fb3d03ac16c77af0f71d427a6dc67e7339fe"} Dec 05 12:15:04 crc kubenswrapper[4711]: I1205 12:15:04.378805 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw" Dec 05 12:15:04 crc kubenswrapper[4711]: I1205 12:15:04.378822 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b89d14f9d06f36bea53ee29facb4fb3d03ac16c77af0f71d427a6dc67e7339fe" Dec 05 12:15:18 crc kubenswrapper[4711]: I1205 12:15:18.301340 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:15:18 crc kubenswrapper[4711]: I1205 12:15:18.302051 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.301588 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.302299 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.302370 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.303238 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"c792b1a4c6f9cbaaf0f7997c6c155794129b76126c2b4c55c97bfc3b4edb1356"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.303339 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://c792b1a4c6f9cbaaf0f7997c6c155794129b76126c2b4c55c97bfc3b4edb1356" gracePeriod=600 Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.631405 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="c792b1a4c6f9cbaaf0f7997c6c155794129b76126c2b4c55c97bfc3b4edb1356" exitCode=0 Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.631471 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"c792b1a4c6f9cbaaf0f7997c6c155794129b76126c2b4c55c97bfc3b4edb1356"} Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.631880 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"aca3bfb6184ef2ff9caa7ac85c79c557d3673f665fd771a118b79c6b0a0a06b1"} Dec 05 12:15:48 crc kubenswrapper[4711]: I1205 12:15:48.631906 4711 scope.go:117] "RemoveContainer" containerID="68a31f2bac94cc7e67f2a4b87251a759e841d0732cd9c3574d838857504427f4" Dec 05 12:17:48 crc kubenswrapper[4711]: I1205 12:17:48.302092 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:17:48 crc kubenswrapper[4711]: I1205 12:17:48.302939 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:18:18 crc kubenswrapper[4711]: I1205 12:18:18.301099 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:18:18 crc kubenswrapper[4711]: I1205 12:18:18.301573 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:18:48 crc kubenswrapper[4711]: I1205 12:18:48.301131 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:18:48 crc kubenswrapper[4711]: I1205 12:18:48.301811 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:18:48 crc kubenswrapper[4711]: I1205 12:18:48.301869 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:18:48 crc kubenswrapper[4711]: I1205 12:18:48.302539 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aca3bfb6184ef2ff9caa7ac85c79c557d3673f665fd771a118b79c6b0a0a06b1"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:18:48 crc kubenswrapper[4711]: I1205 12:18:48.302609 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://aca3bfb6184ef2ff9caa7ac85c79c557d3673f665fd771a118b79c6b0a0a06b1" gracePeriod=600 Dec 05 12:18:49 crc kubenswrapper[4711]: I1205 12:18:49.987591 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="aca3bfb6184ef2ff9caa7ac85c79c557d3673f665fd771a118b79c6b0a0a06b1" exitCode=0 Dec 05 12:18:49 crc kubenswrapper[4711]: I1205 12:18:49.987686 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"aca3bfb6184ef2ff9caa7ac85c79c557d3673f665fd771a118b79c6b0a0a06b1"} Dec 05 12:18:49 crc kubenswrapper[4711]: I1205 12:18:49.988144 4711 scope.go:117] "RemoveContainer" containerID="c792b1a4c6f9cbaaf0f7997c6c155794129b76126c2b4c55c97bfc3b4edb1356" Dec 05 12:18:50 crc kubenswrapper[4711]: I1205 12:18:50.996969 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"91470354b4a7faa05d09c834cdfee9e799a693b86070e148c7f8f23ec7c68fa3"} Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.089791 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-95xvt"] Dec 05 12:19:56 crc kubenswrapper[4711]: E1205 12:19:56.090404 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05e59f9-7440-4bce-8767-34085269a17f" containerName="collect-profiles" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.090415 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05e59f9-7440-4bce-8767-34085269a17f" containerName="collect-profiles" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.090513 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05e59f9-7440-4bce-8767-34085269a17f" containerName="collect-profiles" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.090850 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-95xvt" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.094982 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.095104 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.095145 4711 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-dddjb" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.101691 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-p68lq"] Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.102404 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-p68lq" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.109557 4711 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-hnhxq" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.113053 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-95xvt"] Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.116020 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-x5xls"] Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.117140 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.118741 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-p68lq"] Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.123478 4711 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-glt8s" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.137349 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-x5xls"] Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.256094 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4tlt\" (UniqueName: \"kubernetes.io/projected/38c17a5b-f6a0-4cbd-96dd-c95068e5e844-kube-api-access-p4tlt\") pod \"cert-manager-webhook-5655c58dd6-x5xls\" (UID: \"38c17a5b-f6a0-4cbd-96dd-c95068e5e844\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.256161 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx4q8\" (UniqueName: \"kubernetes.io/projected/d2ea3803-e172-4380-81eb-1027f1a667fa-kube-api-access-tx4q8\") pod \"cert-manager-5b446d88c5-p68lq\" (UID: \"d2ea3803-e172-4380-81eb-1027f1a667fa\") " pod="cert-manager/cert-manager-5b446d88c5-p68lq" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.256217 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgdnr\" (UniqueName: \"kubernetes.io/projected/34883f4c-a75b-4298-bd5b-ecf0077a79c6-kube-api-access-jgdnr\") pod \"cert-manager-cainjector-7f985d654d-95xvt\" (UID: \"34883f4c-a75b-4298-bd5b-ecf0077a79c6\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-95xvt" Dec 05 12:19:56 
crc kubenswrapper[4711]: I1205 12:19:56.356840 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4tlt\" (UniqueName: \"kubernetes.io/projected/38c17a5b-f6a0-4cbd-96dd-c95068e5e844-kube-api-access-p4tlt\") pod \"cert-manager-webhook-5655c58dd6-x5xls\" (UID: \"38c17a5b-f6a0-4cbd-96dd-c95068e5e844\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.356887 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx4q8\" (UniqueName: \"kubernetes.io/projected/d2ea3803-e172-4380-81eb-1027f1a667fa-kube-api-access-tx4q8\") pod \"cert-manager-5b446d88c5-p68lq\" (UID: \"d2ea3803-e172-4380-81eb-1027f1a667fa\") " pod="cert-manager/cert-manager-5b446d88c5-p68lq" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.356914 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgdnr\" (UniqueName: \"kubernetes.io/projected/34883f4c-a75b-4298-bd5b-ecf0077a79c6-kube-api-access-jgdnr\") pod \"cert-manager-cainjector-7f985d654d-95xvt\" (UID: \"34883f4c-a75b-4298-bd5b-ecf0077a79c6\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-95xvt" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.374969 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgdnr\" (UniqueName: \"kubernetes.io/projected/34883f4c-a75b-4298-bd5b-ecf0077a79c6-kube-api-access-jgdnr\") pod \"cert-manager-cainjector-7f985d654d-95xvt\" (UID: \"34883f4c-a75b-4298-bd5b-ecf0077a79c6\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-95xvt" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.379719 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx4q8\" (UniqueName: \"kubernetes.io/projected/d2ea3803-e172-4380-81eb-1027f1a667fa-kube-api-access-tx4q8\") pod \"cert-manager-5b446d88c5-p68lq\" (UID: \"d2ea3803-e172-4380-81eb-1027f1a667fa\") " pod="cert-manager/cert-manager-5b446d88c5-p68lq" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.380145 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4tlt\" (UniqueName: \"kubernetes.io/projected/38c17a5b-f6a0-4cbd-96dd-c95068e5e844-kube-api-access-p4tlt\") pod \"cert-manager-webhook-5655c58dd6-x5xls\" (UID: \"38c17a5b-f6a0-4cbd-96dd-c95068e5e844\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.410147 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-95xvt" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.420919 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-p68lq" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.433791 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.714645 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-x5xls"] Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.726584 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.877256 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-p68lq"] Dec 05 12:19:56 crc kubenswrapper[4711]: W1205 12:19:56.883318 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34883f4c_a75b_4298_bd5b_ecf0077a79c6.slice/crio-fe48597129f6ec9fa6ad7d84f2676cb3432f616fb4536d054046839f12b5b98e WatchSource:0}: Error finding container fe48597129f6ec9fa6ad7d84f2676cb3432f616fb4536d054046839f12b5b98e: Status 404 returned error can't find the container with id fe48597129f6ec9fa6ad7d84f2676cb3432f616fb4536d054046839f12b5b98e Dec 05 12:19:56 crc kubenswrapper[4711]: I1205 12:19:56.883929 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-95xvt"] Dec 05 12:19:57 crc kubenswrapper[4711]: I1205 12:19:57.391508 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-95xvt" event={"ID":"34883f4c-a75b-4298-bd5b-ecf0077a79c6","Type":"ContainerStarted","Data":"fe48597129f6ec9fa6ad7d84f2676cb3432f616fb4536d054046839f12b5b98e"} Dec 05 12:19:57 crc kubenswrapper[4711]: I1205 12:19:57.393423 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-p68lq" event={"ID":"d2ea3803-e172-4380-81eb-1027f1a667fa","Type":"ContainerStarted","Data":"ed615133cd7cf482bfe7ab0efe6389f0a908d046bf8d31ca6478c90d561bb899"} Dec 05 12:19:57 crc kubenswrapper[4711]: I1205 12:19:57.395912 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" event={"ID":"38c17a5b-f6a0-4cbd-96dd-c95068e5e844","Type":"ContainerStarted","Data":"58c3f3dd63ceb20253ec905985998c885e726ed05d422e1d2d8bc05181263ad2"} Dec 05 12:20:02 crc kubenswrapper[4711]: I1205 12:20:02.433285 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-95xvt" event={"ID":"34883f4c-a75b-4298-bd5b-ecf0077a79c6","Type":"ContainerStarted","Data":"f0b9aeb3edf45b7198840118697827109185b652a29b8377cb19fabf40015952"} Dec 05 12:20:02 crc kubenswrapper[4711]: I1205 12:20:02.436512 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-p68lq" event={"ID":"d2ea3803-e172-4380-81eb-1027f1a667fa","Type":"ContainerStarted","Data":"119874d0eea9343b59935299f882b0f059fc54e29a996d78e6243e010ba61ec6"} Dec 05 12:20:02 crc kubenswrapper[4711]: I1205 12:20:02.441946 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" event={"ID":"38c17a5b-f6a0-4cbd-96dd-c95068e5e844","Type":"ContainerStarted","Data":"19e5b577182ee4b2329ef36beed0db4bb4db164864f0ff9a17f75506501baea4"} Dec 05 12:20:02 crc kubenswrapper[4711]: I1205 12:20:02.458647 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-95xvt" podStartSLOduration=2.876593901 podStartE2EDuration="6.458626117s" 
podCreationTimestamp="2025-12-05 12:19:56 +0000 UTC" firstStartedPulling="2025-12-05 12:19:56.887145579 +0000 UTC m=+642.471467959" lastFinishedPulling="2025-12-05 12:20:00.469177845 +0000 UTC m=+646.053500175" observedRunningTime="2025-12-05 12:20:02.451866999 +0000 UTC m=+648.036189329" watchObservedRunningTime="2025-12-05 12:20:02.458626117 +0000 UTC m=+648.042948447" Dec 05 12:20:03 crc kubenswrapper[4711]: I1205 12:20:03.447917 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" Dec 05 12:20:03 crc kubenswrapper[4711]: I1205 12:20:03.468815 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" podStartSLOduration=3.711582023 podStartE2EDuration="7.468790599s" podCreationTimestamp="2025-12-05 12:19:56 +0000 UTC" firstStartedPulling="2025-12-05 12:19:56.726286766 +0000 UTC m=+642.310609096" lastFinishedPulling="2025-12-05 12:20:00.483495342 +0000 UTC m=+646.067817672" observedRunningTime="2025-12-05 12:20:03.466475171 +0000 UTC m=+649.050797581" watchObservedRunningTime="2025-12-05 12:20:03.468790599 +0000 UTC m=+649.053112929" Dec 05 12:20:03 crc kubenswrapper[4711]: I1205 12:20:03.488990 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-p68lq" podStartSLOduration=3.829252939 podStartE2EDuration="7.488965832s" podCreationTimestamp="2025-12-05 12:19:56 +0000 UTC" firstStartedPulling="2025-12-05 12:19:56.882564135 +0000 UTC m=+642.466886495" lastFinishedPulling="2025-12-05 12:20:00.542277058 +0000 UTC m=+646.126599388" observedRunningTime="2025-12-05 12:20:03.484834029 +0000 UTC m=+649.069156409" watchObservedRunningTime="2025-12-05 12:20:03.488965832 +0000 UTC m=+649.073288162" Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.552486 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ndz5q"] Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.553005 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovn-controller" containerID="cri-o://03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa" gracePeriod=30 Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.553080 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kube-rbac-proxy-node" containerID="cri-o://db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7" gracePeriod=30 Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.553096 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="northd" containerID="cri-o://ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27" gracePeriod=30 Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.553146 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovn-acl-logging" containerID="cri-o://6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65" gracePeriod=30 Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.553163 4711 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5" gracePeriod=30 Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.553208 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="nbdb" containerID="cri-o://29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423" gracePeriod=30 Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.553163 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="sbdb" containerID="cri-o://3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0" gracePeriod=30 Dec 05 12:20:06 crc kubenswrapper[4711]: I1205 12:20:06.589977 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" containerID="cri-o://6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2" gracePeriod=30 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.314520 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/2.log" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.317632 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovn-acl-logging/0.log" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.318191 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovn-controller/0.log" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.318770 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388155 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tpxfb"] Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388448 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388463 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388475 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="northd" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388482 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="northd" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388491 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="nbdb" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388498 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="nbdb" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388510 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovn-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388517 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovn-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388524 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovn-acl-logging" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388531 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovn-acl-logging" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388543 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388549 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388560 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kube-rbac-proxy-node" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388567 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kube-rbac-proxy-node" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388578 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="sbdb" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388585 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="sbdb" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388597 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388603 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388611 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388617 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388626 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kubecfg-setup" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388633 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kubecfg-setup" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388757 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kube-rbac-proxy-node" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388772 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovn-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388780 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388786 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388794 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388801 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovn-acl-logging" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388809 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="nbdb" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388820 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="northd" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388830 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="sbdb" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.388948 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.388959 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.389086 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 
12:20:07.389095 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerName="ovnkube-controller" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.391283 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418174 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-var-lib-openvswitch\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418254 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418278 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-etc-openvswitch\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418299 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-netns\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418340 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj2xq\" (UniqueName: \"kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418362 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-openvswitch\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418411 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-systemd\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418443 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-systemd-units\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418461 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-kubelet\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" 
(UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418517 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-node-log\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418546 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-script-lib\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418573 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-netd\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418595 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-slash\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418620 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-config\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418642 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418667 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-log-socket\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418685 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-ovn\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418711 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-bin\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418736 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: 
\"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.418758 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-ovn-kubernetes\") pod \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\" (UID: \"3bce8b78-05d7-4003-9231-24d2e07f0c2a\") " Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.419056 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.419091 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.419115 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.419141 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.419164 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.419963 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420006 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). 
InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420172 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-log-socket" (OuterVolumeSpecName: "log-socket") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420267 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-slash" (OuterVolumeSpecName: "host-slash") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420298 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420374 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420827 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420842 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420842 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-node-log" (OuterVolumeSpecName: "node-log") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420915 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.420894 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.421615 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.426807 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq" (OuterVolumeSpecName: "kube-api-access-fj2xq") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "kube-api-access-fj2xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.428560 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.435660 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "3bce8b78-05d7-4003-9231-24d2e07f0c2a" (UID: "3bce8b78-05d7-4003-9231-24d2e07f0c2a"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.471691 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgkqk_0df94722-138f-4247-b308-3e3ccadc54b5/kube-multus/1.log" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.472133 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgkqk_0df94722-138f-4247-b308-3e3ccadc54b5/kube-multus/0.log" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.472165 4711 generic.go:334] "Generic (PLEG): container finished" podID="0df94722-138f-4247-b308-3e3ccadc54b5" containerID="4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0" exitCode=2 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.472234 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgkqk" event={"ID":"0df94722-138f-4247-b308-3e3ccadc54b5","Type":"ContainerDied","Data":"4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.472269 4711 scope.go:117] "RemoveContainer" containerID="76119ef6ecb658d036d359ac0a8b7537c36e532deb6a53e36fc417e4d807f019" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.472705 4711 scope.go:117] "RemoveContainer" containerID="4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.472854 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-wgkqk_openshift-multus(0df94722-138f-4247-b308-3e3ccadc54b5)\"" pod="openshift-multus/multus-wgkqk" podUID="0df94722-138f-4247-b308-3e3ccadc54b5" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.480232 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovnkube-controller/2.log" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.483669 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovn-acl-logging/0.log" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484347 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ndz5q_3bce8b78-05d7-4003-9231-24d2e07f0c2a/ovn-controller/0.log" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484775 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2" exitCode=0 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484796 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0" exitCode=0 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484806 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423" exitCode=0 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484814 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27" exitCode=0 Dec 05 12:20:07 crc 
kubenswrapper[4711]: I1205 12:20:07.484821 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5" exitCode=0 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484829 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7" exitCode=0 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484836 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65" exitCode=143 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484844 4711 generic.go:334] "Generic (PLEG): container finished" podID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" containerID="03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa" exitCode=143 Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484864 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484888 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484899 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484909 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484918 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484928 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484938 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484948 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484954 4711 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484959 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484965 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484970 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484975 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484981 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484986 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484991 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.484999 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485006 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485012 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485017 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485022 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485028 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485033 4711 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485039 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485044 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485050 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485056 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485064 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485071 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485079 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485085 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485091 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485096 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485102 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485107 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485113 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485118 4711 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485123 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485130 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q" event={"ID":"3bce8b78-05d7-4003-9231-24d2e07f0c2a","Type":"ContainerDied","Data":"22c63587e5f3c11aed126725cd14775a67729e8054733e771e47db7679f65768"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485137 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485145 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485150 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485155 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485160 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485166 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485171 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485176 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485181 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485187 4711 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18"} Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485273 4711 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.485273 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ndz5q"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.502211 4711 scope.go:117] "RemoveContainer" containerID="6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.519998 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-run-ovn-kubernetes\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520061 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-env-overrides\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520087 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-slash\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520115 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-kubelet\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520152 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-ovn\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520175 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-var-lib-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520194 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-cni-bin\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520219 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520246 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-etc-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520590 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-log-socket\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520783 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkszj\" (UniqueName: \"kubernetes.io/projected/5201af9e-9010-41ea-95c2-1e45c7557e7f-kube-api-access-fkszj\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520857 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovnkube-script-lib\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520905 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovn-node-metrics-cert\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.520944 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-systemd-units\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521042 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521230 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovnkube-config\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521264 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-cni-netd\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521329 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-systemd\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521376 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-run-netns\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521413 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-node-log\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521578 4711 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521631 4711 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521648 4711 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-netns\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521665 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj2xq\" (UniqueName: \"kubernetes.io/projected/3bce8b78-05d7-4003-9231-24d2e07f0c2a-kube-api-access-fj2xq\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521699 4711 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521713 4711 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-systemd\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521727 4711 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-systemd-units\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521740 4711 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-kubelet\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521754 4711 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-node-log\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521782 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521797 4711 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-netd\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521810 4711 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-slash\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521823 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521836 4711 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3bce8b78-05d7-4003-9231-24d2e07f0c2a-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521867 4711 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-log-socket\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521878 4711 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521894 4711 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-cni-bin\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521907 4711 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3bce8b78-05d7-4003-9231-24d2e07f0c2a-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521942 4711 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.521957 4711 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3bce8b78-05d7-4003-9231-24d2e07f0c2a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.528653 4711 scope.go:117] "RemoveContainer" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.539690 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ndz5q"]
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.543504 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ndz5q"]
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.561711 4711 scope.go:117] "RemoveContainer" containerID="3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.577272 4711 scope.go:117] "RemoveContainer" containerID="29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.600337 4711 scope.go:117] "RemoveContainer" containerID="ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.616881 4711 scope.go:117] "RemoveContainer" containerID="0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.622763 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkszj\" (UniqueName: \"kubernetes.io/projected/5201af9e-9010-41ea-95c2-1e45c7557e7f-kube-api-access-fkszj\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.622833 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovn-node-metrics-cert\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.622866 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovnkube-script-lib\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.622911 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-systemd-units\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.622952 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovnkube-config\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623101 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-cni-netd\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623126 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-systemd-units\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623219 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-run-netns\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623161 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-run-netns\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623290 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-systemd\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623305 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623325 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-node-log\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623344 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-cni-netd\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623410 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-run-ovn-kubernetes\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623459 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-env-overrides\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623490 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-slash\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623524 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-kubelet\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623557 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-ovn\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623584 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-var-lib-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623637 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-cni-bin\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623696 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623757 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-etc-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623807 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-log-socket\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623956 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-systemd\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.623984 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-node-log\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624015 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-run-ovn-kubernetes\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624042 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-cni-bin\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624050 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-var-lib-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624105 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624147 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-log-socket\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624156 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-etc-openvswitch\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624188 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-slash\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624212 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-host-kubelet\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624267 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5201af9e-9010-41ea-95c2-1e45c7557e7f-run-ovn\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624286 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovnkube-config\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624634 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovnkube-script-lib\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.624783 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5201af9e-9010-41ea-95c2-1e45c7557e7f-env-overrides\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.628314 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5201af9e-9010-41ea-95c2-1e45c7557e7f-ovn-node-metrics-cert\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.634016 4711 scope.go:117] "RemoveContainer" containerID="db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.642034 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkszj\" (UniqueName: \"kubernetes.io/projected/5201af9e-9010-41ea-95c2-1e45c7557e7f-kube-api-access-fkszj\") pod \"ovnkube-node-tpxfb\" (UID: \"5201af9e-9010-41ea-95c2-1e45c7557e7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.655154 4711 scope.go:117] "RemoveContainer" containerID="6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.672938 4711 scope.go:117] "RemoveContainer" containerID="03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"
Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.701364 4711 scope.go:117] "RemoveContainer" containerID="ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18"
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.720049 4711 scope.go:117] "RemoveContainer" containerID="6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.720823 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2\": container with ID starting with 6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2 not found: ID does not exist" containerID="6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.721000 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} err="failed to get container status \"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2\": rpc error: code = NotFound desc = could not find container \"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2\": container with ID starting with 6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.721119 4711 scope.go:117] "RemoveContainer" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.721735 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\": container with ID starting with 14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af not found: ID does not exist" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.721786 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"} err="failed to get container status \"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\": rpc error: code = NotFound desc = could not find container \"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\": container with ID starting with 14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.721832 4711 scope.go:117] "RemoveContainer" containerID="3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.722120 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\": container with ID starting with 3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0 not found: ID does not exist" containerID="3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.722168 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"} err="failed to get container status \"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\": rpc error: code = 
NotFound desc = could not find container \"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\": container with ID starting with 3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.722194 4711 scope.go:117] "RemoveContainer" containerID="29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.722537 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\": container with ID starting with 29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423 not found: ID does not exist" containerID="29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.722648 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"} err="failed to get container status \"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\": rpc error: code = NotFound desc = could not find container \"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\": container with ID starting with 29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.722737 4711 scope.go:117] "RemoveContainer" containerID="ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.723838 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\": container with ID starting with ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27 not found: ID does not exist" containerID="ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.723877 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"} err="failed to get container status \"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\": rpc error: code = NotFound desc = could not find container \"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\": container with ID starting with ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.723897 4711 scope.go:117] "RemoveContainer" containerID="0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.724208 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\": container with ID starting with 0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5 not found: ID does not exist" containerID="0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.724543 4711 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"} err="failed to get container status \"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\": rpc error: code = NotFound desc = could not find container \"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\": container with ID starting with 0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.724652 4711 scope.go:117] "RemoveContainer" containerID="db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.725028 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\": container with ID starting with db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7 not found: ID does not exist" containerID="db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.725064 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"} err="failed to get container status \"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\": rpc error: code = NotFound desc = could not find container \"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\": container with ID starting with db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.725104 4711 scope.go:117] "RemoveContainer" containerID="6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.725843 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\": container with ID starting with 6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65 not found: ID does not exist" containerID="6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.725875 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"} err="failed to get container status \"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\": rpc error: code = NotFound desc = could not find container \"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\": container with ID starting with 6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.725895 4711 scope.go:117] "RemoveContainer" containerID="03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.726269 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\": container with ID starting with 03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa not found: ID does not exist" 
containerID="03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.726304 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"} err="failed to get container status \"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\": rpc error: code = NotFound desc = could not find container \"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\": container with ID starting with 03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.726322 4711 scope.go:117] "RemoveContainer" containerID="ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18" Dec 05 12:20:07 crc kubenswrapper[4711]: E1205 12:20:07.726602 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\": container with ID starting with ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18 not found: ID does not exist" containerID="ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.726628 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18"} err="failed to get container status \"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\": rpc error: code = NotFound desc = could not find container \"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\": container with ID starting with ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.726643 4711 scope.go:117] "RemoveContainer" containerID="6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.726862 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} err="failed to get container status \"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2\": rpc error: code = NotFound desc = could not find container \"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2\": container with ID starting with 6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.726913 4711 scope.go:117] "RemoveContainer" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727098 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"} err="failed to get container status \"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\": rpc error: code = NotFound desc = could not find container \"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\": container with ID starting with 14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727119 4711 scope.go:117] "RemoveContainer" 
containerID="3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727300 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0"} err="failed to get container status \"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\": rpc error: code = NotFound desc = could not find container \"3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0\": container with ID starting with 3556f50787f5fd7aed9f37784aec510d299f4d5ea3aa09e06221a75edccb23b0 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727321 4711 scope.go:117] "RemoveContainer" containerID="29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727531 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423"} err="failed to get container status \"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\": rpc error: code = NotFound desc = could not find container \"29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423\": container with ID starting with 29e205d22f6d675592a6702a3031f0cb2192650bf40841e13a0bdcc5a38ce423 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727552 4711 scope.go:117] "RemoveContainer" containerID="ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727720 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27"} err="failed to get container status \"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\": rpc error: code = NotFound desc = could not find container \"ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27\": container with ID starting with ab4b128c83609b7bf9831e5077afb3887aaeff710248e1452473f824ff6a0e27 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727740 4711 scope.go:117] "RemoveContainer" containerID="0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727904 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5"} err="failed to get container status \"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\": rpc error: code = NotFound desc = could not find container \"0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5\": container with ID starting with 0e00aea80c876c78ea652e21d7361c71127725036725c12f21e5dd6fec3c63e5 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.727922 4711 scope.go:117] "RemoveContainer" containerID="db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.728216 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7"} err="failed to get container status \"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\": rpc error: code = NotFound desc = could not find 
container \"db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7\": container with ID starting with db79679e38f3396d6a7c7c46868fa1131b6a1d027e33733e26ba2767c63204f7 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.728248 4711 scope.go:117] "RemoveContainer" containerID="6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.728606 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65"} err="failed to get container status \"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\": rpc error: code = NotFound desc = could not find container \"6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65\": container with ID starting with 6bdd960616aacc92bcf476997d2c167dc09274dba962568d42f846578c984e65 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.728628 4711 scope.go:117] "RemoveContainer" containerID="03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.728839 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa"} err="failed to get container status \"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\": rpc error: code = NotFound desc = could not find container \"03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa\": container with ID starting with 03a5fd97097e8ec35bda17e70a7c81748ea005f226d497489c6c18f10b9ca4aa not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.728866 4711 scope.go:117] "RemoveContainer" containerID="ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.729061 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18"} err="failed to get container status \"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\": rpc error: code = NotFound desc = could not find container \"ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18\": container with ID starting with ad57425d8e4ee3ef12f6e3e4c759a15120f2d2c27169fe63b2bfc4482ea5ef18 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.729080 4711 scope.go:117] "RemoveContainer" containerID="6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.729277 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2"} err="failed to get container status \"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2\": rpc error: code = NotFound desc = could not find container \"6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2\": container with ID starting with 6218fc7524469b1189bf6908a68a4bb3cae122ed46084b03feb4540081484af2 not found: ID does not exist" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.729297 4711 scope.go:117] "RemoveContainer" containerID="14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af" Dec 05 12:20:07 crc kubenswrapper[4711]: I1205 12:20:07.729513 4711 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af"} err="failed to get container status \"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\": rpc error: code = NotFound desc = could not find container \"14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af\": container with ID starting with 14be1a94f55defa8e4ef6f72884e0f41ab55ac2b5ecbcc5fefa926b2cf9d97af not found: ID does not exist" Dec 05 12:20:08 crc kubenswrapper[4711]: I1205 12:20:08.492650 4711 generic.go:334] "Generic (PLEG): container finished" podID="5201af9e-9010-41ea-95c2-1e45c7557e7f" containerID="f529dadc810653621d324161aba93eda52a22cd0af6b1877cab4b4851d407fda" exitCode=0 Dec 05 12:20:08 crc kubenswrapper[4711]: I1205 12:20:08.492820 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerDied","Data":"f529dadc810653621d324161aba93eda52a22cd0af6b1877cab4b4851d407fda"} Dec 05 12:20:08 crc kubenswrapper[4711]: I1205 12:20:08.493227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"fa26090e524a7872576a6a002e147e334f57e01be0364ad9af33453c3468c693"} Dec 05 12:20:08 crc kubenswrapper[4711]: I1205 12:20:08.498451 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgkqk_0df94722-138f-4247-b308-3e3ccadc54b5/kube-multus/1.log" Dec 05 12:20:08 crc kubenswrapper[4711]: I1205 12:20:08.694576 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bce8b78-05d7-4003-9231-24d2e07f0c2a" path="/var/lib/kubelet/pods/3bce8b78-05d7-4003-9231-24d2e07f0c2a/volumes" Dec 05 12:20:09 crc kubenswrapper[4711]: I1205 12:20:09.510787 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"c57358d3d2e49da9cedafb984938e6abcbf7831a9cfeedfb5f303f0788ad3af0"} Dec 05 12:20:09 crc kubenswrapper[4711]: I1205 12:20:09.510878 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"3085c085ece6aa4ae5545f2a6da655e739e949b05049c20feb5a4497fe674119"} Dec 05 12:20:09 crc kubenswrapper[4711]: I1205 12:20:09.510898 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"d47c9f5191cf31b5d872855c5ae4574d1684a92562af2f802a7085b95e693341"} Dec 05 12:20:09 crc kubenswrapper[4711]: I1205 12:20:09.510912 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"4476a7c2c8a599df3e9fd760a573d20043cbc4d1342632f6f802d96abd029ded"} Dec 05 12:20:09 crc kubenswrapper[4711]: I1205 12:20:09.510924 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"7292a073a379efcf6434c5f27b720c405a0df72eabfa8f6b825149ad2c3ecf19"} Dec 05 12:20:09 crc kubenswrapper[4711]: I1205 12:20:09.510936 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb"
event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"4a343639bc267253f38d4058d328efdb1f73e867bb8273f28d59aea125ee6159"} Dec 05 12:20:11 crc kubenswrapper[4711]: I1205 12:20:11.436810 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-x5xls" Dec 05 12:20:11 crc kubenswrapper[4711]: I1205 12:20:11.531755 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"eaa2f0dccfe96a01901cbdf7b888de51f8c683169286ba44c1384a954913b04a"} Dec 05 12:20:14 crc kubenswrapper[4711]: I1205 12:20:14.555992 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" event={"ID":"5201af9e-9010-41ea-95c2-1e45c7557e7f","Type":"ContainerStarted","Data":"e518e604edcbcd1bc916e3aaec6730bd3fb49571d4c8a97423cf05b76cf05eda"} Dec 05 12:20:14 crc kubenswrapper[4711]: I1205 12:20:14.556980 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" Dec 05 12:20:14 crc kubenswrapper[4711]: I1205 12:20:14.557005 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" Dec 05 12:20:14 crc kubenswrapper[4711]: I1205 12:20:14.594914 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" podStartSLOduration=7.594888023 podStartE2EDuration="7.594888023s" podCreationTimestamp="2025-12-05 12:20:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:20:14.587071831 +0000 UTC m=+660.171394191" watchObservedRunningTime="2025-12-05 12:20:14.594888023 +0000 UTC m=+660.179210353" Dec 05 12:20:14 crc kubenswrapper[4711]: I1205 12:20:14.601969 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" Dec 05 12:20:15 crc kubenswrapper[4711]: I1205 12:20:15.527507 4711 scope.go:117] "RemoveContainer" containerID="39c4963dec192b286536d008c3bf513a849587044d473299ac74ecbed102f2c2" Dec 05 12:20:15 crc kubenswrapper[4711]: I1205 12:20:15.562845 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" Dec 05 12:20:15 crc kubenswrapper[4711]: I1205 12:20:15.587101 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" Dec 05 12:20:21 crc kubenswrapper[4711]: I1205 12:20:21.683534 4711 scope.go:117] "RemoveContainer" containerID="4d80323912b8c8bcc005d1ec2e77b2b6b43a8b184779b7d98e44dc3a1e6000e0" Dec 05 12:20:23 crc kubenswrapper[4711]: I1205 12:20:23.609004 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgkqk_0df94722-138f-4247-b308-3e3ccadc54b5/kube-multus/1.log" Dec 05 12:20:23 crc kubenswrapper[4711]: I1205 12:20:23.609332 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgkqk" event={"ID":"0df94722-138f-4247-b308-3e3ccadc54b5","Type":"ContainerStarted","Data":"ad40d651c9bd449c2425b3969b54402cbfdb06c9483d0cc98a9f519920a03273"} Dec 05 12:20:37 crc kubenswrapper[4711]: I1205 12:20:37.733805 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tpxfb" Dec 05 
12:20:46 crc kubenswrapper[4711]: I1205 12:20:46.895838 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz"] Dec 05 12:20:46 crc kubenswrapper[4711]: I1205 12:20:46.897599 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:46 crc kubenswrapper[4711]: I1205 12:20:46.899558 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 12:20:46 crc kubenswrapper[4711]: I1205 12:20:46.908281 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz"] Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.015840 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.016006 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.016180 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pjp2\" (UniqueName: \"kubernetes.io/projected/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-kube-api-access-5pjp2\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.118118 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.118216 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pjp2\" (UniqueName: \"kubernetes.io/projected/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-kube-api-access-5pjp2\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.118252 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: 
\"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.118842 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.118866 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.138564 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pjp2\" (UniqueName: \"kubernetes.io/projected/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-kube-api-access-5pjp2\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.217082 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.401046 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz"] Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.736449 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" event={"ID":"18a13fe2-c30e-4b1f-8c32-dafc95e5000a","Type":"ContainerStarted","Data":"a02005b8cca8c8ab00a2b24e8d916f2b8c2cedfa161098bce3a3298c2c0fc48f"} Dec 05 12:20:47 crc kubenswrapper[4711]: I1205 12:20:47.736506 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" event={"ID":"18a13fe2-c30e-4b1f-8c32-dafc95e5000a","Type":"ContainerStarted","Data":"e05e7c2a198e5191e6115434da4dd9556445e35812fd3ba3e6cee20fe48f7e7c"} Dec 05 12:20:48 crc kubenswrapper[4711]: I1205 12:20:48.743457 4711 generic.go:334] "Generic (PLEG): container finished" podID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerID="a02005b8cca8c8ab00a2b24e8d916f2b8c2cedfa161098bce3a3298c2c0fc48f" exitCode=0 Dec 05 12:20:48 crc kubenswrapper[4711]: I1205 12:20:48.743518 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" event={"ID":"18a13fe2-c30e-4b1f-8c32-dafc95e5000a","Type":"ContainerDied","Data":"a02005b8cca8c8ab00a2b24e8d916f2b8c2cedfa161098bce3a3298c2c0fc48f"} Dec 05 12:20:51 crc kubenswrapper[4711]: I1205 12:20:51.764855 4711 generic.go:334] "Generic (PLEG): container finished" podID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerID="11995c113117ecf4a3aaa0d36e30eeceb9ac559025ac901f41660f38541ad500" exitCode=0 Dec 05 
12:20:51 crc kubenswrapper[4711]: I1205 12:20:51.764900 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" event={"ID":"18a13fe2-c30e-4b1f-8c32-dafc95e5000a","Type":"ContainerDied","Data":"11995c113117ecf4a3aaa0d36e30eeceb9ac559025ac901f41660f38541ad500"} Dec 05 12:20:52 crc kubenswrapper[4711]: I1205 12:20:52.772794 4711 generic.go:334] "Generic (PLEG): container finished" podID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerID="e94606d949d391e4938d1eb34a7462156673758c36440b46d7cdcc0c530ba2dd" exitCode=0 Dec 05 12:20:52 crc kubenswrapper[4711]: I1205 12:20:52.772966 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" event={"ID":"18a13fe2-c30e-4b1f-8c32-dafc95e5000a","Type":"ContainerDied","Data":"e94606d949d391e4938d1eb34a7462156673758c36440b46d7cdcc0c530ba2dd"} Dec 05 12:20:53 crc kubenswrapper[4711]: I1205 12:20:53.982210 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.105693 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-bundle\") pod \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.105772 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pjp2\" (UniqueName: \"kubernetes.io/projected/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-kube-api-access-5pjp2\") pod \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.105846 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-util\") pod \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\" (UID: \"18a13fe2-c30e-4b1f-8c32-dafc95e5000a\") " Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.108219 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-bundle" (OuterVolumeSpecName: "bundle") pod "18a13fe2-c30e-4b1f-8c32-dafc95e5000a" (UID: "18a13fe2-c30e-4b1f-8c32-dafc95e5000a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.111135 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-kube-api-access-5pjp2" (OuterVolumeSpecName: "kube-api-access-5pjp2") pod "18a13fe2-c30e-4b1f-8c32-dafc95e5000a" (UID: "18a13fe2-c30e-4b1f-8c32-dafc95e5000a"). InnerVolumeSpecName "kube-api-access-5pjp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.116026 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-util" (OuterVolumeSpecName: "util") pod "18a13fe2-c30e-4b1f-8c32-dafc95e5000a" (UID: "18a13fe2-c30e-4b1f-8c32-dafc95e5000a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.207547 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pjp2\" (UniqueName: \"kubernetes.io/projected/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-kube-api-access-5pjp2\") on node \"crc\" DevicePath \"\"" Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.207589 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-util\") on node \"crc\" DevicePath \"\"" Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.207602 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/18a13fe2-c30e-4b1f-8c32-dafc95e5000a-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.789355 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" event={"ID":"18a13fe2-c30e-4b1f-8c32-dafc95e5000a","Type":"ContainerDied","Data":"e05e7c2a198e5191e6115434da4dd9556445e35812fd3ba3e6cee20fe48f7e7c"} Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.789432 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e05e7c2a198e5191e6115434da4dd9556445e35812fd3ba3e6cee20fe48f7e7c" Dec 05 12:20:54 crc kubenswrapper[4711]: I1205 12:20:54.789457 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.376726 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2"] Dec 05 12:21:04 crc kubenswrapper[4711]: E1205 12:21:04.377586 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerName="pull" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.377604 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerName="pull" Dec 05 12:21:04 crc kubenswrapper[4711]: E1205 12:21:04.377622 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerName="util" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.377629 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerName="util" Dec 05 12:21:04 crc kubenswrapper[4711]: E1205 12:21:04.377645 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerName="extract" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.377652 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerName="extract" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.377779 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="18a13fe2-c30e-4b1f-8c32-dafc95e5000a" containerName="extract" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.378280 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.380417 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.380617 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.380740 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-6m2zk" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.392764 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.472225 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzcjp\" (UniqueName: \"kubernetes.io/projected/8d47e5b8-d669-4947-b439-eaa06a31187e-kube-api-access-nzcjp\") pod \"obo-prometheus-operator-668cf9dfbb-g96b2\" (UID: \"8d47e5b8-d669-4947-b439-eaa06a31187e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.571245 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.572242 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.573425 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzcjp\" (UniqueName: \"kubernetes.io/projected/8d47e5b8-d669-4947-b439-eaa06a31187e-kube-api-access-nzcjp\") pod \"obo-prometheus-operator-668cf9dfbb-g96b2\" (UID: \"8d47e5b8-d669-4947-b439-eaa06a31187e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.590545 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.591425 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-9fh6f" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.594069 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.594951 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.615416 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzcjp\" (UniqueName: \"kubernetes.io/projected/8d47e5b8-d669-4947-b439-eaa06a31187e-kube-api-access-nzcjp\") pod \"obo-prometheus-operator-668cf9dfbb-g96b2\" (UID: \"8d47e5b8-d669-4947-b439-eaa06a31187e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.615907 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.675844 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3edd2ed9-7a50-40f6-8a48-9398732a79cb-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-nhj4j\" (UID: \"3edd2ed9-7a50-40f6-8a48-9398732a79cb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.675933 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3edd2ed9-7a50-40f6-8a48-9398732a79cb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-nhj4j\" (UID: \"3edd2ed9-7a50-40f6-8a48-9398732a79cb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.675990 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f7e16934-1f3b-4272-ac69-4bcc6eec482f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-ng96c\" (UID: \"f7e16934-1f3b-4272-ac69-4bcc6eec482f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.676012 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f7e16934-1f3b-4272-ac69-4bcc6eec482f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-ng96c\" (UID: \"f7e16934-1f3b-4272-ac69-4bcc6eec482f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.695780 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.703358 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.773319 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-6lzpv"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.775241 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.778266 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3edd2ed9-7a50-40f6-8a48-9398732a79cb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-nhj4j\" (UID: \"3edd2ed9-7a50-40f6-8a48-9398732a79cb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.778413 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f7e16934-1f3b-4272-ac69-4bcc6eec482f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-ng96c\" (UID: \"f7e16934-1f3b-4272-ac69-4bcc6eec482f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.778446 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f7e16934-1f3b-4272-ac69-4bcc6eec482f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-ng96c\" (UID: \"f7e16934-1f3b-4272-ac69-4bcc6eec482f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.778564 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3edd2ed9-7a50-40f6-8a48-9398732a79cb-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-nhj4j\" (UID: \"3edd2ed9-7a50-40f6-8a48-9398732a79cb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.780608 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.782917 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-9bck5" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.786880 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f7e16934-1f3b-4272-ac69-4bcc6eec482f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-ng96c\" (UID: \"f7e16934-1f3b-4272-ac69-4bcc6eec482f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.787597 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3edd2ed9-7a50-40f6-8a48-9398732a79cb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-nhj4j\" (UID: \"3edd2ed9-7a50-40f6-8a48-9398732a79cb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.788337 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f7e16934-1f3b-4272-ac69-4bcc6eec482f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-ng96c\" (UID: \"f7e16934-1f3b-4272-ac69-4bcc6eec482f\") " 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.792103 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3edd2ed9-7a50-40f6-8a48-9398732a79cb-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c959d4656-nhj4j\" (UID: \"3edd2ed9-7a50-40f6-8a48-9398732a79cb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.797550 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-6lzpv"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.882411 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsc4k\" (UniqueName: \"kubernetes.io/projected/d56ccf44-6d74-47a4-93f2-d242ad353756-kube-api-access-qsc4k\") pod \"observability-operator-d8bb48f5d-6lzpv\" (UID: \"d56ccf44-6d74-47a4-93f2-d242ad353756\") " pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.882522 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d56ccf44-6d74-47a4-93f2-d242ad353756-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-6lzpv\" (UID: \"d56ccf44-6d74-47a4-93f2-d242ad353756\") " pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.890402 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.917807 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.939772 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-brnw9"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.940958 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.957018 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-fjl88" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.968751 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-brnw9"] Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.984236 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d56ccf44-6d74-47a4-93f2-d242ad353756-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-6lzpv\" (UID: \"d56ccf44-6d74-47a4-93f2-d242ad353756\") " pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.984407 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsc4k\" (UniqueName: \"kubernetes.io/projected/d56ccf44-6d74-47a4-93f2-d242ad353756-kube-api-access-qsc4k\") pod \"observability-operator-d8bb48f5d-6lzpv\" (UID: \"d56ccf44-6d74-47a4-93f2-d242ad353756\") " pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:04 crc kubenswrapper[4711]: I1205 12:21:04.993535 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d56ccf44-6d74-47a4-93f2-d242ad353756-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-6lzpv\" (UID: \"d56ccf44-6d74-47a4-93f2-d242ad353756\") " pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.008690 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsc4k\" (UniqueName: \"kubernetes.io/projected/d56ccf44-6d74-47a4-93f2-d242ad353756-kube-api-access-qsc4k\") pod \"observability-operator-d8bb48f5d-6lzpv\" (UID: \"d56ccf44-6d74-47a4-93f2-d242ad353756\") " pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.085843 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/0b910cd0-6b78-4758-a2b9-2a451b85ca3d-openshift-service-ca\") pod \"perses-operator-5446b9c989-brnw9\" (UID: \"0b910cd0-6b78-4758-a2b9-2a451b85ca3d\") " pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.085917 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgq9t\" (UniqueName: \"kubernetes.io/projected/0b910cd0-6b78-4758-a2b9-2a451b85ca3d-kube-api-access-jgq9t\") pod \"perses-operator-5446b9c989-brnw9\" (UID: \"0b910cd0-6b78-4758-a2b9-2a451b85ca3d\") " pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.125261 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.141408 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2"] Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.187840 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgq9t\" (UniqueName: \"kubernetes.io/projected/0b910cd0-6b78-4758-a2b9-2a451b85ca3d-kube-api-access-jgq9t\") pod \"perses-operator-5446b9c989-brnw9\" (UID: \"0b910cd0-6b78-4758-a2b9-2a451b85ca3d\") " pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.187981 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/0b910cd0-6b78-4758-a2b9-2a451b85ca3d-openshift-service-ca\") pod \"perses-operator-5446b9c989-brnw9\" (UID: \"0b910cd0-6b78-4758-a2b9-2a451b85ca3d\") " pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.189866 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/0b910cd0-6b78-4758-a2b9-2a451b85ca3d-openshift-service-ca\") pod \"perses-operator-5446b9c989-brnw9\" (UID: \"0b910cd0-6b78-4758-a2b9-2a451b85ca3d\") " pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.229288 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgq9t\" (UniqueName: \"kubernetes.io/projected/0b910cd0-6b78-4758-a2b9-2a451b85ca3d-kube-api-access-jgq9t\") pod \"perses-operator-5446b9c989-brnw9\" (UID: \"0b910cd0-6b78-4758-a2b9-2a451b85ca3d\") " pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.268541 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j"] Dec 05 12:21:05 crc kubenswrapper[4711]: W1205 12:21:05.288574 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3edd2ed9_7a50_40f6_8a48_9398732a79cb.slice/crio-727176a3df31dd08f5b0b750a0a2c04f12b047d39ef1fdfb7da9ee4e0764678f WatchSource:0}: Error finding container 727176a3df31dd08f5b0b750a0a2c04f12b047d39ef1fdfb7da9ee4e0764678f: Status 404 returned error can't find the container with id 727176a3df31dd08f5b0b750a0a2c04f12b047d39ef1fdfb7da9ee4e0764678f Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.294566 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.299037 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c"] Dec 05 12:21:05 crc kubenswrapper[4711]: W1205 12:21:05.337220 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7e16934_1f3b_4272_ac69_4bcc6eec482f.slice/crio-7eaca426e3133973138b87b9357bcaa5f1202f819d0ed984e6a4a3b76df87094 WatchSource:0}: Error finding container 7eaca426e3133973138b87b9357bcaa5f1202f819d0ed984e6a4a3b76df87094: Status 404 returned error can't find the container with id 7eaca426e3133973138b87b9357bcaa5f1202f819d0ed984e6a4a3b76df87094 Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.496012 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-6lzpv"] Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.596526 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-brnw9"] Dec 05 12:21:05 crc kubenswrapper[4711]: W1205 12:21:05.604499 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b910cd0_6b78_4758_a2b9_2a451b85ca3d.slice/crio-5273ae5aa6e7babce5e04b767e30eb44c53f41d782aba0e0d5766475d5cc8d4d WatchSource:0}: Error finding container 5273ae5aa6e7babce5e04b767e30eb44c53f41d782aba0e0d5766475d5cc8d4d: Status 404 returned error can't find the container with id 5273ae5aa6e7babce5e04b767e30eb44c53f41d782aba0e0d5766475d5cc8d4d Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.872676 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" event={"ID":"3edd2ed9-7a50-40f6-8a48-9398732a79cb","Type":"ContainerStarted","Data":"727176a3df31dd08f5b0b750a0a2c04f12b047d39ef1fdfb7da9ee4e0764678f"} Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.874564 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" event={"ID":"f7e16934-1f3b-4272-ac69-4bcc6eec482f","Type":"ContainerStarted","Data":"7eaca426e3133973138b87b9357bcaa5f1202f819d0ed984e6a4a3b76df87094"} Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.875935 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" event={"ID":"d56ccf44-6d74-47a4-93f2-d242ad353756","Type":"ContainerStarted","Data":"5beedcf2916f3d57e9984ab3ee2cce5dbe50ba57bd6b7501ff8484f68935424b"} Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.877251 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-brnw9" event={"ID":"0b910cd0-6b78-4758-a2b9-2a451b85ca3d","Type":"ContainerStarted","Data":"5273ae5aa6e7babce5e04b767e30eb44c53f41d782aba0e0d5766475d5cc8d4d"} Dec 05 12:21:05 crc kubenswrapper[4711]: I1205 12:21:05.878591 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" event={"ID":"8d47e5b8-d669-4947-b439-eaa06a31187e","Type":"ContainerStarted","Data":"b5117ce534f0caaefe1bed6a61bbfc0f0a9d6f13dff7cda9b467499ead127568"} Dec 05 12:21:11 crc kubenswrapper[4711]: I1205 12:21:11.522100 4711 dynamic_cafile_content.go:123] "Loaded a new CA 
Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 12:21:18 crc kubenswrapper[4711]: I1205 12:21:18.300673 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:21:18 crc kubenswrapper[4711]: I1205 12:21:18.301294 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:21:23 crc kubenswrapper[4711]: E1205 12:21:23.369975 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 05 12:21:23 crc kubenswrapper[4711]: E1205 12:21:23.370579 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nzcjp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
obo-prometheus-operator-668cf9dfbb-g96b2_openshift-operators(8d47e5b8-d669-4947-b439-eaa06a31187e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:21:23 crc kubenswrapper[4711]: E1205 12:21:23.372347 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" podUID="8d47e5b8-d669-4947-b439-eaa06a31187e" Dec 05 12:21:24 crc kubenswrapper[4711]: E1205 12:21:23.999412 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" podUID="8d47e5b8-d669-4947-b439-eaa06a31187e" Dec 05 12:21:24 crc kubenswrapper[4711]: E1205 12:21:24.041121 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 05 12:21:24 crc kubenswrapper[4711]: E1205 12:21:24.043574 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-c959d4656-ng96c_openshift-operators(f7e16934-1f3b-4272-ac69-4bcc6eec482f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:21:24 crc kubenswrapper[4711]: E1205 12:21:24.044897 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" podUID="f7e16934-1f3b-4272-ac69-4bcc6eec482f" Dec 05 12:21:25 crc kubenswrapper[4711]: E1205 12:21:25.004624 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" podUID="f7e16934-1f3b-4272-ac69-4bcc6eec482f" Dec 05 12:21:26 crc kubenswrapper[4711]: E1205 12:21:26.407787 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 05 12:21:26 crc kubenswrapper[4711]: E1205 12:21:26.408075 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) 
--images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) --openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITO
RING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qsc4k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-d8bb48f5d-6lzpv_openshift-operators(d56ccf44-6d74-47a4-93f2-d242ad353756): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:21:26 crc kubenswrapper[4711]: E1205 12:21:26.409337 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" podUID="d56ccf44-6d74-47a4-93f2-d242ad353756" Dec 05 12:21:27 crc kubenswrapper[4711]: E1205 12:21:27.023367 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" podUID="d56ccf44-6d74-47a4-93f2-d242ad353756" Dec 05 12:21:28 crc kubenswrapper[4711]: I1205 12:21:28.027029 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-brnw9" event={"ID":"0b910cd0-6b78-4758-a2b9-2a451b85ca3d","Type":"ContainerStarted","Data":"cb75fdbc36d23a66bf2f99d4f6f429308f613d99a7498f80eac51b15b8d767db"} Dec 05 12:21:28 crc kubenswrapper[4711]: I1205 12:21:28.027350 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:28 crc kubenswrapper[4711]: I1205 12:21:28.028810 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" event={"ID":"3edd2ed9-7a50-40f6-8a48-9398732a79cb","Type":"ContainerStarted","Data":"8596cf25dd2a0663fe1006bcc315286f017b3f350c98cbcdb0033b3c98b14cd7"} Dec 05 12:21:28 crc kubenswrapper[4711]: I1205 12:21:28.059500 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-brnw9" podStartSLOduration=2.649198433 podStartE2EDuration="24.059485598s" podCreationTimestamp="2025-12-05 12:21:04 +0000 UTC" firstStartedPulling="2025-12-05 12:21:05.606850351 +0000 UTC m=+711.191172681" lastFinishedPulling="2025-12-05 12:21:27.017137516 +0000 UTC m=+732.601459846" observedRunningTime="2025-12-05 12:21:28.04911558 +0000 UTC m=+733.633437910" watchObservedRunningTime="2025-12-05 12:21:28.059485598 +0000 UTC m=+733.643807928" Dec 05 12:21:28 crc kubenswrapper[4711]: I1205 12:21:28.067738 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-nhj4j" podStartSLOduration=2.487155148 podStartE2EDuration="24.067718822s" podCreationTimestamp="2025-12-05 12:21:04 +0000 UTC" firstStartedPulling="2025-12-05 12:21:05.324105614 +0000 UTC m=+710.908427954" lastFinishedPulling="2025-12-05 12:21:26.904669298 +0000 UTC m=+732.488991628" observedRunningTime="2025-12-05 12:21:28.067074995 +0000 UTC m=+733.651397325" watchObservedRunningTime="2025-12-05 12:21:28.067718822 +0000 UTC m=+733.652041152" Dec 05 12:21:35 crc kubenswrapper[4711]: I1205 12:21:35.298299 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-brnw9" Dec 05 12:21:38 crc kubenswrapper[4711]: I1205 12:21:38.089524 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" event={"ID":"8d47e5b8-d669-4947-b439-eaa06a31187e","Type":"ContainerStarted","Data":"8077f643d27a6f87877126ae1b7f119396281c932ec89a921aed7d8bba9ab258"} Dec 05 12:21:38 crc kubenswrapper[4711]: I1205 12:21:38.107966 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-g96b2" podStartSLOduration=1.795275489 podStartE2EDuration="34.10794717s" podCreationTimestamp="2025-12-05 12:21:04 +0000 UTC" firstStartedPulling="2025-12-05 12:21:05.166171392 +0000 UTC m=+710.750493722" lastFinishedPulling="2025-12-05 12:21:37.478843073 +0000 UTC m=+743.063165403" observedRunningTime="2025-12-05 12:21:38.105769795 +0000 UTC m=+743.690092145" 
watchObservedRunningTime="2025-12-05 12:21:38.10794717 +0000 UTC m=+743.692269500" Dec 05 12:21:40 crc kubenswrapper[4711]: I1205 12:21:40.101996 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" event={"ID":"f7e16934-1f3b-4272-ac69-4bcc6eec482f","Type":"ContainerStarted","Data":"ceca11a51e8787aca4e20724f76e766bddac6fe96417b03b180c3842d0966a2c"} Dec 05 12:21:40 crc kubenswrapper[4711]: I1205 12:21:40.105124 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" event={"ID":"d56ccf44-6d74-47a4-93f2-d242ad353756","Type":"ContainerStarted","Data":"b6d6986e76b3d7a4f2d6ee816e0f59f3c536675a8cb15d471753fcedabaffba5"} Dec 05 12:21:40 crc kubenswrapper[4711]: I1205 12:21:40.105447 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:40 crc kubenswrapper[4711]: I1205 12:21:40.118276 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" Dec 05 12:21:40 crc kubenswrapper[4711]: I1205 12:21:40.133823 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c959d4656-ng96c" podStartSLOduration=-9223372000.720972 podStartE2EDuration="36.133804142s" podCreationTimestamp="2025-12-05 12:21:04 +0000 UTC" firstStartedPulling="2025-12-05 12:21:05.342343565 +0000 UTC m=+710.926665915" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:21:40.133739611 +0000 UTC m=+745.718061941" watchObservedRunningTime="2025-12-05 12:21:40.133804142 +0000 UTC m=+745.718126472" Dec 05 12:21:40 crc kubenswrapper[4711]: I1205 12:21:40.162119 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-6lzpv" podStartSLOduration=2.200157807 podStartE2EDuration="36.162101494s" podCreationTimestamp="2025-12-05 12:21:04 +0000 UTC" firstStartedPulling="2025-12-05 12:21:05.513312789 +0000 UTC m=+711.097635119" lastFinishedPulling="2025-12-05 12:21:39.475256476 +0000 UTC m=+745.059578806" observedRunningTime="2025-12-05 12:21:40.158531335 +0000 UTC m=+745.742853675" watchObservedRunningTime="2025-12-05 12:21:40.162101494 +0000 UTC m=+745.746423824" Dec 05 12:21:48 crc kubenswrapper[4711]: I1205 12:21:48.301645 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:21:48 crc kubenswrapper[4711]: I1205 12:21:48.303502 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:21:57 crc kubenswrapper[4711]: I1205 12:21:57.730812 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l"] Dec 05 12:21:57 crc kubenswrapper[4711]: I1205 12:21:57.733467 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:57 crc kubenswrapper[4711]: I1205 12:21:57.736851 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 12:21:57 crc kubenswrapper[4711]: I1205 12:21:57.744642 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l"] Dec 05 12:21:57 crc kubenswrapper[4711]: I1205 12:21:57.930618 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:57 crc kubenswrapper[4711]: I1205 12:21:57.930714 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlwgs\" (UniqueName: \"kubernetes.io/projected/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-kube-api-access-wlwgs\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:57 crc kubenswrapper[4711]: I1205 12:21:57.930754 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:58 crc kubenswrapper[4711]: I1205 12:21:58.031538 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:58 crc kubenswrapper[4711]: I1205 12:21:58.031602 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:58 crc kubenswrapper[4711]: I1205 12:21:58.031657 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlwgs\" (UniqueName: \"kubernetes.io/projected/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-kube-api-access-wlwgs\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:58 crc kubenswrapper[4711]: I1205 12:21:58.032013 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:58 crc kubenswrapper[4711]: I1205 12:21:58.032089 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:58 crc kubenswrapper[4711]: I1205 12:21:58.051026 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlwgs\" (UniqueName: \"kubernetes.io/projected/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-kube-api-access-wlwgs\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:58 crc kubenswrapper[4711]: I1205 12:21:58.349783 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:21:58 crc kubenswrapper[4711]: I1205 12:21:58.584958 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l"] Dec 05 12:21:58 crc kubenswrapper[4711]: W1205 12:21:58.593729 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd585a412_c9fb_45d1_a0ef_e26b9fc1ce84.slice/crio-555a8f74447f6984aed7ac4372e4760f28b208174f7b3a6482693de0284e6994 WatchSource:0}: Error finding container 555a8f74447f6984aed7ac4372e4760f28b208174f7b3a6482693de0284e6994: Status 404 returned error can't find the container with id 555a8f74447f6984aed7ac4372e4760f28b208174f7b3a6482693de0284e6994 Dec 05 12:21:59 crc kubenswrapper[4711]: I1205 12:21:59.210051 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" event={"ID":"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84","Type":"ContainerStarted","Data":"555a8f74447f6984aed7ac4372e4760f28b208174f7b3a6482693de0284e6994"} Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.076088 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rrtc6"] Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.077320 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.089251 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rrtc6"] Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.162831 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-utilities\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.162959 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-catalog-content\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.162993 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xzv9\" (UniqueName: \"kubernetes.io/projected/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-kube-api-access-8xzv9\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.264477 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-catalog-content\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.264539 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xzv9\" (UniqueName: \"kubernetes.io/projected/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-kube-api-access-8xzv9\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.264588 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-utilities\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.265075 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-catalog-content\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.265201 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-utilities\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.285917 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8xzv9\" (UniqueName: \"kubernetes.io/projected/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-kube-api-access-8xzv9\") pod \"redhat-operators-rrtc6\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.393030 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:00 crc kubenswrapper[4711]: I1205 12:22:00.691418 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rrtc6"] Dec 05 12:22:00 crc kubenswrapper[4711]: W1205 12:22:00.715807 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3406bf7d_10e1_4b2e_9cdf_0b4ca8d490f5.slice/crio-32ad432a8531ae09f6b1874fcd6f43aa8565e4272155162b8c0fcd42a2a7c7e4 WatchSource:0}: Error finding container 32ad432a8531ae09f6b1874fcd6f43aa8565e4272155162b8c0fcd42a2a7c7e4: Status 404 returned error can't find the container with id 32ad432a8531ae09f6b1874fcd6f43aa8565e4272155162b8c0fcd42a2a7c7e4 Dec 05 12:22:01 crc kubenswrapper[4711]: I1205 12:22:01.221841 4711 generic.go:334] "Generic (PLEG): container finished" podID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerID="515fc11bc1ad6a7f98135e9ee2279ad3a68d10068d2c777b4336d1599365f834" exitCode=0 Dec 05 12:22:01 crc kubenswrapper[4711]: I1205 12:22:01.221967 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" event={"ID":"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84","Type":"ContainerDied","Data":"515fc11bc1ad6a7f98135e9ee2279ad3a68d10068d2c777b4336d1599365f834"} Dec 05 12:22:01 crc kubenswrapper[4711]: I1205 12:22:01.223717 4711 generic.go:334] "Generic (PLEG): container finished" podID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerID="4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab" exitCode=0 Dec 05 12:22:01 crc kubenswrapper[4711]: I1205 12:22:01.223769 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrtc6" event={"ID":"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5","Type":"ContainerDied","Data":"4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab"} Dec 05 12:22:01 crc kubenswrapper[4711]: I1205 12:22:01.223799 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrtc6" event={"ID":"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5","Type":"ContainerStarted","Data":"32ad432a8531ae09f6b1874fcd6f43aa8565e4272155162b8c0fcd42a2a7c7e4"} Dec 05 12:22:02 crc kubenswrapper[4711]: I1205 12:22:02.231080 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrtc6" event={"ID":"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5","Type":"ContainerStarted","Data":"03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23"} Dec 05 12:22:03 crc kubenswrapper[4711]: I1205 12:22:03.236876 4711 generic.go:334] "Generic (PLEG): container finished" podID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerID="da6840a9f4c7e0d5f03b1c790bdfcf89d4d4d2644515b67f256213eb3b5f0993" exitCode=0 Dec 05 12:22:03 crc kubenswrapper[4711]: I1205 12:22:03.236938 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" 
event={"ID":"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84","Type":"ContainerDied","Data":"da6840a9f4c7e0d5f03b1c790bdfcf89d4d4d2644515b67f256213eb3b5f0993"} Dec 05 12:22:03 crc kubenswrapper[4711]: I1205 12:22:03.238609 4711 generic.go:334] "Generic (PLEG): container finished" podID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerID="03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23" exitCode=0 Dec 05 12:22:03 crc kubenswrapper[4711]: I1205 12:22:03.238669 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrtc6" event={"ID":"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5","Type":"ContainerDied","Data":"03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23"} Dec 05 12:22:04 crc kubenswrapper[4711]: I1205 12:22:04.250558 4711 generic.go:334] "Generic (PLEG): container finished" podID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerID="0d330ae8c0c2c9de341aaa1f812ac710fe67599116bc50b712fddf69cbbecbcc" exitCode=0 Dec 05 12:22:04 crc kubenswrapper[4711]: I1205 12:22:04.250605 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" event={"ID":"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84","Type":"ContainerDied","Data":"0d330ae8c0c2c9de341aaa1f812ac710fe67599116bc50b712fddf69cbbecbcc"} Dec 05 12:22:04 crc kubenswrapper[4711]: I1205 12:22:04.254511 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrtc6" event={"ID":"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5","Type":"ContainerStarted","Data":"4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b"} Dec 05 12:22:04 crc kubenswrapper[4711]: I1205 12:22:04.285646 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rrtc6" podStartSLOduration=1.486557009 podStartE2EDuration="4.285628741s" podCreationTimestamp="2025-12-05 12:22:00 +0000 UTC" firstStartedPulling="2025-12-05 12:22:01.225070147 +0000 UTC m=+766.809392487" lastFinishedPulling="2025-12-05 12:22:04.024141889 +0000 UTC m=+769.608464219" observedRunningTime="2025-12-05 12:22:04.285136449 +0000 UTC m=+769.869458799" watchObservedRunningTime="2025-12-05 12:22:04.285628741 +0000 UTC m=+769.869951081" Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.497999 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.536957 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-util\") pod \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.537078 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlwgs\" (UniqueName: \"kubernetes.io/projected/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-kube-api-access-wlwgs\") pod \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.537105 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-bundle\") pod \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\" (UID: \"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84\") " Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.537838 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-bundle" (OuterVolumeSpecName: "bundle") pod "d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" (UID: "d585a412-c9fb-45d1-a0ef-e26b9fc1ce84"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.547699 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-kube-api-access-wlwgs" (OuterVolumeSpecName: "kube-api-access-wlwgs") pod "d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" (UID: "d585a412-c9fb-45d1-a0ef-e26b9fc1ce84"). InnerVolumeSpecName "kube-api-access-wlwgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.552143 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-util" (OuterVolumeSpecName: "util") pod "d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" (UID: "d585a412-c9fb-45d1-a0ef-e26b9fc1ce84"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.638889 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlwgs\" (UniqueName: \"kubernetes.io/projected/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-kube-api-access-wlwgs\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.638939 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:05 crc kubenswrapper[4711]: I1205 12:22:05.638951 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d585a412-c9fb-45d1-a0ef-e26b9fc1ce84-util\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:06 crc kubenswrapper[4711]: I1205 12:22:06.268161 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" event={"ID":"d585a412-c9fb-45d1-a0ef-e26b9fc1ce84","Type":"ContainerDied","Data":"555a8f74447f6984aed7ac4372e4760f28b208174f7b3a6482693de0284e6994"} Dec 05 12:22:06 crc kubenswrapper[4711]: I1205 12:22:06.268500 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="555a8f74447f6984aed7ac4372e4760f28b208174f7b3a6482693de0284e6994" Dec 05 12:22:06 crc kubenswrapper[4711]: I1205 12:22:06.268235 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.515648 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s"] Dec 05 12:22:08 crc kubenswrapper[4711]: E1205 12:22:08.515860 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerName="util" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.515871 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerName="util" Dec 05 12:22:08 crc kubenswrapper[4711]: E1205 12:22:08.515890 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerName="extract" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.515896 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerName="extract" Dec 05 12:22:08 crc kubenswrapper[4711]: E1205 12:22:08.515906 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerName="pull" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.515912 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerName="pull" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.516005 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d585a412-c9fb-45d1-a0ef-e26b9fc1ce84" containerName="extract" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.516437 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.518260 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.519167 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.520120 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-rcvbw" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.533214 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s"] Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.575600 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r7qw\" (UniqueName: \"kubernetes.io/projected/27ec6a7b-d939-47c8-9fa4-0fa976c78cfc-kube-api-access-5r7qw\") pod \"nmstate-operator-5b5b58f5c8-mpk6s\" (UID: \"27ec6a7b-d939-47c8-9fa4-0fa976c78cfc\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.676839 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r7qw\" (UniqueName: \"kubernetes.io/projected/27ec6a7b-d939-47c8-9fa4-0fa976c78cfc-kube-api-access-5r7qw\") pod \"nmstate-operator-5b5b58f5c8-mpk6s\" (UID: \"27ec6a7b-d939-47c8-9fa4-0fa976c78cfc\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.699125 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r7qw\" (UniqueName: \"kubernetes.io/projected/27ec6a7b-d939-47c8-9fa4-0fa976c78cfc-kube-api-access-5r7qw\") pod \"nmstate-operator-5b5b58f5c8-mpk6s\" (UID: \"27ec6a7b-d939-47c8-9fa4-0fa976c78cfc\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s" Dec 05 12:22:08 crc kubenswrapper[4711]: I1205 12:22:08.831600 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s" Dec 05 12:22:09 crc kubenswrapper[4711]: I1205 12:22:09.046192 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s"] Dec 05 12:22:09 crc kubenswrapper[4711]: I1205 12:22:09.290732 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s" event={"ID":"27ec6a7b-d939-47c8-9fa4-0fa976c78cfc","Type":"ContainerStarted","Data":"c99a505a69e9dceea3b976a3c332673db1edb93aa91ee1a14d732ec2ce11553f"} Dec 05 12:22:10 crc kubenswrapper[4711]: I1205 12:22:10.393487 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:10 crc kubenswrapper[4711]: I1205 12:22:10.393539 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:10 crc kubenswrapper[4711]: I1205 12:22:10.453104 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:11 crc kubenswrapper[4711]: I1205 12:22:11.345634 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:12 crc kubenswrapper[4711]: I1205 12:22:12.313935 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s" event={"ID":"27ec6a7b-d939-47c8-9fa4-0fa976c78cfc","Type":"ContainerStarted","Data":"983a9f135250cce6fdf72b9d2dc98c29759e1c71943c1ce790d502bd2b47d489"} Dec 05 12:22:12 crc kubenswrapper[4711]: I1205 12:22:12.335144 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-mpk6s" podStartSLOduration=1.557712971 podStartE2EDuration="4.335125847s" podCreationTimestamp="2025-12-05 12:22:08 +0000 UTC" firstStartedPulling="2025-12-05 12:22:09.054988998 +0000 UTC m=+774.639311338" lastFinishedPulling="2025-12-05 12:22:11.832401884 +0000 UTC m=+777.416724214" observedRunningTime="2025-12-05 12:22:12.330738878 +0000 UTC m=+777.915061218" watchObservedRunningTime="2025-12-05 12:22:12.335125847 +0000 UTC m=+777.919448177" Dec 05 12:22:12 crc kubenswrapper[4711]: I1205 12:22:12.874071 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rrtc6"] Dec 05 12:22:13 crc kubenswrapper[4711]: I1205 12:22:13.318963 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rrtc6" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="registry-server" containerID="cri-o://4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b" gracePeriod=2 Dec 05 12:22:14 crc kubenswrapper[4711]: I1205 12:22:14.856069 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.049903 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-utilities\") pod \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.049990 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-catalog-content\") pod \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.050026 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xzv9\" (UniqueName: \"kubernetes.io/projected/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-kube-api-access-8xzv9\") pod \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\" (UID: \"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5\") " Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.050881 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-utilities" (OuterVolumeSpecName: "utilities") pod "3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" (UID: "3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.056361 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-kube-api-access-8xzv9" (OuterVolumeSpecName: "kube-api-access-8xzv9") pod "3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" (UID: "3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5"). InnerVolumeSpecName "kube-api-access-8xzv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.151872 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.151913 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xzv9\" (UniqueName: \"kubernetes.io/projected/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-kube-api-access-8xzv9\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.159356 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" (UID: "3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.253194 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.334238 4711 generic.go:334] "Generic (PLEG): container finished" podID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerID="4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b" exitCode=0 Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.334296 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrtc6" event={"ID":"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5","Type":"ContainerDied","Data":"4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b"} Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.334309 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rrtc6" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.334324 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrtc6" event={"ID":"3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5","Type":"ContainerDied","Data":"32ad432a8531ae09f6b1874fcd6f43aa8565e4272155162b8c0fcd42a2a7c7e4"} Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.334341 4711 scope.go:117] "RemoveContainer" containerID="4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.353737 4711 scope.go:117] "RemoveContainer" containerID="03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.371011 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rrtc6"] Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.376611 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rrtc6"] Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.383957 4711 scope.go:117] "RemoveContainer" containerID="4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.402685 4711 scope.go:117] "RemoveContainer" containerID="4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b" Dec 05 12:22:15 crc kubenswrapper[4711]: E1205 12:22:15.403286 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b\": container with ID starting with 4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b not found: ID does not exist" containerID="4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.403345 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b"} err="failed to get container status \"4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b\": rpc error: code = NotFound desc = could not find container \"4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b\": container with ID starting with 4c06fe113bdc5237968a3f3008909b5a06584bb4b79a50bbf23589833647039b not found: ID does not exist" Dec 05 12:22:15 crc 
kubenswrapper[4711]: I1205 12:22:15.403613 4711 scope.go:117] "RemoveContainer" containerID="03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23" Dec 05 12:22:15 crc kubenswrapper[4711]: E1205 12:22:15.403935 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23\": container with ID starting with 03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23 not found: ID does not exist" containerID="03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.403963 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23"} err="failed to get container status \"03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23\": rpc error: code = NotFound desc = could not find container \"03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23\": container with ID starting with 03dde4bd889d3f7b84b49dfe4ebdc95cc09f89d031a0ce79ba210cd4f9fe2f23 not found: ID does not exist" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.403985 4711 scope.go:117] "RemoveContainer" containerID="4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab" Dec 05 12:22:15 crc kubenswrapper[4711]: E1205 12:22:15.404229 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab\": container with ID starting with 4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab not found: ID does not exist" containerID="4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab" Dec 05 12:22:15 crc kubenswrapper[4711]: I1205 12:22:15.404252 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab"} err="failed to get container status \"4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab\": rpc error: code = NotFound desc = could not find container \"4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab\": container with ID starting with 4311dcf5a3d1bcd644ee4a0f53902e6fd782ffd91343528928842a29a82b9cab not found: ID does not exist" Dec 05 12:22:16 crc kubenswrapper[4711]: I1205 12:22:16.690340 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" path="/var/lib/kubelet/pods/3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5/volumes" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.300692 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.300794 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.300876 4711 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.301912 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"91470354b4a7faa05d09c834cdfee9e799a693b86070e148c7f8f23ec7c68fa3"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.302049 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://91470354b4a7faa05d09c834cdfee9e799a693b86070e148c7f8f23ec7c68fa3" gracePeriod=600 Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871155 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8"] Dec 05 12:22:18 crc kubenswrapper[4711]: E1205 12:22:18.871738 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="extract-utilities" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871764 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="extract-utilities" Dec 05 12:22:18 crc kubenswrapper[4711]: E1205 12:22:18.871781 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="registry-server" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871790 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="registry-server" Dec 05 12:22:18 crc kubenswrapper[4711]: E1205 12:22:18.871814 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="extract-content" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871824 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="extract-content" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871943 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="registry-server" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.872694 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.875661 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-ngddk" Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.880198 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm"] Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.881371 4711 util.go:30] "No sandbox for pod can be found. 
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871155 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8"]
Dec 05 12:22:18 crc kubenswrapper[4711]: E1205 12:22:18.871738 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="extract-utilities"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871764 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="extract-utilities"
Dec 05 12:22:18 crc kubenswrapper[4711]: E1205 12:22:18.871781 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="registry-server"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871790 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="registry-server"
Dec 05 12:22:18 crc kubenswrapper[4711]: E1205 12:22:18.871814 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="extract-content"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871824 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="extract-content"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.871943 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3406bf7d-10e1-4b2e-9cdf-0b4ca8d490f5" containerName="registry-server"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.872694 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.875661 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-ngddk"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.880198 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm"]
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.881371 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.884352 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.885173 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8"]
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.894063 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm"]
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.920245 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-j7clc"]
Dec 05 12:22:18 crc kubenswrapper[4711]: I1205 12:22:18.920991 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-j7clc"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.004985 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/58574466-bffd-4755-b6fb-09380f829468-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tlcnm\" (UID: \"58574466-bffd-4755-b6fb-09380f829468\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.005101 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rxv6\" (UniqueName: \"kubernetes.io/projected/58574466-bffd-4755-b6fb-09380f829468-kube-api-access-2rxv6\") pod \"nmstate-webhook-5f6d4c5ccb-tlcnm\" (UID: \"58574466-bffd-4755-b6fb-09380f829468\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.005129 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdk4t\" (UniqueName: \"kubernetes.io/projected/c35b2a97-230a-483e-8321-fa0b7bd8c593-kube-api-access-cdk4t\") pod \"nmstate-metrics-7f946cbc9-pnfb8\" (UID: \"c35b2a97-230a-483e-8321-fa0b7bd8c593\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.037653 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"]
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.038503 4711 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.043072 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.043284 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-jxgz4" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.043756 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.059083 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"] Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.107118 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rxv6\" (UniqueName: \"kubernetes.io/projected/58574466-bffd-4755-b6fb-09380f829468-kube-api-access-2rxv6\") pod \"nmstate-webhook-5f6d4c5ccb-tlcnm\" (UID: \"58574466-bffd-4755-b6fb-09380f829468\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.107175 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdk4t\" (UniqueName: \"kubernetes.io/projected/c35b2a97-230a-483e-8321-fa0b7bd8c593-kube-api-access-cdk4t\") pod \"nmstate-metrics-7f946cbc9-pnfb8\" (UID: \"c35b2a97-230a-483e-8321-fa0b7bd8c593\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.107221 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-nmstate-lock\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.107244 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-dbus-socket\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.107267 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/58574466-bffd-4755-b6fb-09380f829468-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tlcnm\" (UID: \"58574466-bffd-4755-b6fb-09380f829468\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.107307 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvgrh\" (UniqueName: \"kubernetes.io/projected/4b791e02-e955-4407-a42e-98eedbff8135-kube-api-access-kvgrh\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.107335 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-ovs-socket\") pod \"nmstate-handler-j7clc\" (UID: 
\"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.117759 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/58574466-bffd-4755-b6fb-09380f829468-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tlcnm\" (UID: \"58574466-bffd-4755-b6fb-09380f829468\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.125513 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdk4t\" (UniqueName: \"kubernetes.io/projected/c35b2a97-230a-483e-8321-fa0b7bd8c593-kube-api-access-cdk4t\") pod \"nmstate-metrics-7f946cbc9-pnfb8\" (UID: \"c35b2a97-230a-483e-8321-fa0b7bd8c593\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.133985 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rxv6\" (UniqueName: \"kubernetes.io/projected/58574466-bffd-4755-b6fb-09380f829468-kube-api-access-2rxv6\") pod \"nmstate-webhook-5f6d4c5ccb-tlcnm\" (UID: \"58574466-bffd-4755-b6fb-09380f829468\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.193361 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.200850 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.208593 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvgrh\" (UniqueName: \"kubernetes.io/projected/4b791e02-e955-4407-a42e-98eedbff8135-kube-api-access-kvgrh\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.208646 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b56e7dfc-5e5e-48ac-949e-3b4704532748-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.208690 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-ovs-socket\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.208730 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b56e7dfc-5e5e-48ac-949e-3b4704532748-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.208769 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rt54t\" (UniqueName: 
\"kubernetes.io/projected/b56e7dfc-5e5e-48ac-949e-3b4704532748-kube-api-access-rt54t\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.208814 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-nmstate-lock\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.208836 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-dbus-socket\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.209257 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-dbus-socket\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.209336 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-ovs-socket\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.209380 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4b791e02-e955-4407-a42e-98eedbff8135-nmstate-lock\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.238080 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvgrh\" (UniqueName: \"kubernetes.io/projected/4b791e02-e955-4407-a42e-98eedbff8135-kube-api-access-kvgrh\") pod \"nmstate-handler-j7clc\" (UID: \"4b791e02-e955-4407-a42e-98eedbff8135\") " pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.238555 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.247834 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-75bb5b6bf-bvq25"] Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.248634 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-75bb5b6bf-bvq25"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.264520 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-75bb5b6bf-bvq25"]
Dec 05 12:22:19 crc kubenswrapper[4711]: W1205 12:22:19.279390 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b791e02_e955_4407_a42e_98eedbff8135.slice/crio-b17494d65a275b7a040a38f7d8eed26ea750b18b37da890cbdb4211e4b257519 WatchSource:0}: Error finding container b17494d65a275b7a040a38f7d8eed26ea750b18b37da890cbdb4211e4b257519: Status 404 returned error can't find the container with id b17494d65a275b7a040a38f7d8eed26ea750b18b37da890cbdb4211e4b257519
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.309626 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rt54t\" (UniqueName: \"kubernetes.io/projected/b56e7dfc-5e5e-48ac-949e-3b4704532748-kube-api-access-rt54t\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.309723 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b56e7dfc-5e5e-48ac-949e-3b4704532748-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.309787 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b56e7dfc-5e5e-48ac-949e-3b4704532748-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.310958 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b56e7dfc-5e5e-48ac-949e-3b4704532748-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.314444 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b56e7dfc-5e5e-48ac-949e-3b4704532748-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.331938 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rt54t\" (UniqueName: \"kubernetes.io/projected/b56e7dfc-5e5e-48ac-949e-3b4704532748-kube-api-access-rt54t\") pod \"nmstate-console-plugin-7fbb5f6569-4cxfd\" (UID: \"b56e7dfc-5e5e-48ac-949e-3b4704532748\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"
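The three MountVolume.SetUp events just above cover the three most common non-hostPath volume types on this node: a ConfigMap (nginx-conf), a Secret (plugin-serving-cert), and a projected service-account token (kube-api-access-rt54t, which the kubelet injects automatically). A hedged, abbreviated sketch of the volumes section of a pod spec that matches these mounts is below; the volume names come from the log, but the referenced object names and the projected sources are assumptions based on the standard shapes of these volume types.

    # Hedged sketch of the volume declarations implied by the mount events above.
    volumes:
    - name: nginx-conf
      configMap:
        name: nginx-conf                  # assumed ConfigMap name
    - name: plugin-serving-cert
      secret:
        secretName: plugin-serving-cert   # assumed Secret name
    - name: kube-api-access-rt54t         # injected by the kubelet, not user-authored
      projected:
        sources:                          # abbreviated; real volumes also project
        - serviceAccountToken:            # the namespace via the downward API
            expirationSeconds: 3607       # typical injected value (assumption)
            path: token
        - configMap:
            name: kube-root-ca.crt
            items:
            - key: ca.crt
              path: ca.crt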
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.361844 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.386577 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="91470354b4a7faa05d09c834cdfee9e799a693b86070e148c7f8f23ec7c68fa3" exitCode=0
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.386658 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"91470354b4a7faa05d09c834cdfee9e799a693b86070e148c7f8f23ec7c68fa3"}
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.386695 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"472c49a169e1c65e73aac62e2dd2cc7781a63dc02785bf64789fa6376616fd5d"}
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.386719 4711 scope.go:117] "RemoveContainer" containerID="aca3bfb6184ef2ff9caa7ac85c79c557d3673f665fd771a118b79c6b0a0a06b1"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.392169 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-j7clc" event={"ID":"4b791e02-e955-4407-a42e-98eedbff8135","Type":"ContainerStarted","Data":"b17494d65a275b7a040a38f7d8eed26ea750b18b37da890cbdb4211e4b257519"}
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.411174 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-oauth-config\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.411509 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-oauth-serving-cert\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.411547 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-service-ca\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.411565 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-serving-cert\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25"
Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.411591 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-trusted-ca-bundle\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") "
pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.411614 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-config\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.411639 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvqwq\" (UniqueName: \"kubernetes.io/projected/ce39a5a9-df81-4f68-8c7e-e467711cba9a-kube-api-access-rvqwq\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.505247 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8"] Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.513173 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-oauth-config\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.513354 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-oauth-serving-cert\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.513501 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-service-ca\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.513591 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-serving-cert\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.513711 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-trusted-ca-bundle\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.515260 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-config\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.513799 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" 
(UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-config\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.516247 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvqwq\" (UniqueName: \"kubernetes.io/projected/ce39a5a9-df81-4f68-8c7e-e467711cba9a-kube-api-access-rvqwq\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.516932 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-service-ca\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.517592 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-trusted-ca-bundle\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.517737 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ce39a5a9-df81-4f68-8c7e-e467711cba9a-oauth-serving-cert\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.524275 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-serving-cert\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.534712 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvqwq\" (UniqueName: \"kubernetes.io/projected/ce39a5a9-df81-4f68-8c7e-e467711cba9a-kube-api-access-rvqwq\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.534734 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ce39a5a9-df81-4f68-8c7e-e467711cba9a-console-oauth-config\") pod \"console-75bb5b6bf-bvq25\" (UID: \"ce39a5a9-df81-4f68-8c7e-e467711cba9a\") " pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.572516 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.725045 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm"] Dec 05 12:22:19 crc kubenswrapper[4711]: W1205 12:22:19.732503 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58574466_bffd_4755_b6fb_09380f829468.slice/crio-c12e890fd79789077a986c865d1bde0833f2d08f6b677dbc686b4ab06f218380 WatchSource:0}: Error finding container c12e890fd79789077a986c865d1bde0833f2d08f6b677dbc686b4ab06f218380: Status 404 returned error can't find the container with id c12e890fd79789077a986c865d1bde0833f2d08f6b677dbc686b4ab06f218380 Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.751009 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-75bb5b6bf-bvq25"] Dec 05 12:22:19 crc kubenswrapper[4711]: W1205 12:22:19.753106 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce39a5a9_df81_4f68_8c7e_e467711cba9a.slice/crio-a1e99122f940d1de894e759e93a1b37b5ed55b562a19b576db23918970984502 WatchSource:0}: Error finding container a1e99122f940d1de894e759e93a1b37b5ed55b562a19b576db23918970984502: Status 404 returned error can't find the container with id a1e99122f940d1de894e759e93a1b37b5ed55b562a19b576db23918970984502 Dec 05 12:22:19 crc kubenswrapper[4711]: I1205 12:22:19.803574 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd"] Dec 05 12:22:19 crc kubenswrapper[4711]: W1205 12:22:19.812723 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb56e7dfc_5e5e_48ac_949e_3b4704532748.slice/crio-92598476c3f6457b68c718c39a6389d901d2d2a40d706912c59dae1a61ad20b4 WatchSource:0}: Error finding container 92598476c3f6457b68c718c39a6389d901d2d2a40d706912c59dae1a61ad20b4: Status 404 returned error can't find the container with id 92598476c3f6457b68c718c39a6389d901d2d2a40d706912c59dae1a61ad20b4 Dec 05 12:22:20 crc kubenswrapper[4711]: I1205 12:22:20.401124 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-75bb5b6bf-bvq25" event={"ID":"ce39a5a9-df81-4f68-8c7e-e467711cba9a","Type":"ContainerStarted","Data":"6888b63d97d8821ff79bcdb7b63874aa8c3751a5f16ba462891193276b5449dc"} Dec 05 12:22:20 crc kubenswrapper[4711]: I1205 12:22:20.401593 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-75bb5b6bf-bvq25" event={"ID":"ce39a5a9-df81-4f68-8c7e-e467711cba9a","Type":"ContainerStarted","Data":"a1e99122f940d1de894e759e93a1b37b5ed55b562a19b576db23918970984502"} Dec 05 12:22:20 crc kubenswrapper[4711]: I1205 12:22:20.402661 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd" event={"ID":"b56e7dfc-5e5e-48ac-949e-3b4704532748","Type":"ContainerStarted","Data":"92598476c3f6457b68c718c39a6389d901d2d2a40d706912c59dae1a61ad20b4"} Dec 05 12:22:20 crc kubenswrapper[4711]: I1205 12:22:20.407066 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8" event={"ID":"c35b2a97-230a-483e-8321-fa0b7bd8c593","Type":"ContainerStarted","Data":"8db684f5a226a0745b6ab1ca63856072084a65f9f026c5a1795f11a7debb63fa"} Dec 05 12:22:20 crc kubenswrapper[4711]: 
I1205 12:22:20.408103 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" event={"ID":"58574466-bffd-4755-b6fb-09380f829468","Type":"ContainerStarted","Data":"c12e890fd79789077a986c865d1bde0833f2d08f6b677dbc686b4ab06f218380"}
Dec 05 12:22:20 crc kubenswrapper[4711]: I1205 12:22:20.422616 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-75bb5b6bf-bvq25" podStartSLOduration=1.4225989540000001 podStartE2EDuration="1.422598954s" podCreationTimestamp="2025-12-05 12:22:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:22:20.419180649 +0000 UTC m=+786.003502999" watchObservedRunningTime="2025-12-05 12:22:20.422598954 +0000 UTC m=+786.006921274"
Dec 05 12:22:22 crc kubenswrapper[4711]: I1205 12:22:22.422706 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" event={"ID":"58574466-bffd-4755-b6fb-09380f829468","Type":"ContainerStarted","Data":"3e0d747807edf2f1defc234b9aed6c0f1e813631ca818ff81e2b589d4c28f7d0"}
Dec 05 12:22:22 crc kubenswrapper[4711]: I1205 12:22:22.423113 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm"
Dec 05 12:22:22 crc kubenswrapper[4711]: I1205 12:22:22.424788 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-j7clc" event={"ID":"4b791e02-e955-4407-a42e-98eedbff8135","Type":"ContainerStarted","Data":"3bb01f4a9a6b7d9a6d8989508d53524ea9e64005f857b7b895fe188977849e5d"}
Dec 05 12:22:22 crc kubenswrapper[4711]: I1205 12:22:22.424840 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-j7clc"
Dec 05 12:22:22 crc kubenswrapper[4711]: I1205 12:22:22.426897 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8" event={"ID":"c35b2a97-230a-483e-8321-fa0b7bd8c593","Type":"ContainerStarted","Data":"85bef078cc5a91a8a37e791f992d40b963802ef903f909a400554d03ae7a62f0"}
Dec 05 12:22:22 crc kubenswrapper[4711]: I1205 12:22:22.428591 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd" event={"ID":"b56e7dfc-5e5e-48ac-949e-3b4704532748","Type":"ContainerStarted","Data":"822d8adaf45a23d48fe4d10cdaf5194456f4a90ff8880d60748c5e6bead25f31"}
Dec 05 12:22:22 crc kubenswrapper[4711]: I1205 12:22:22.449016 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" podStartSLOduration=2.187559657 podStartE2EDuration="4.448989861s" podCreationTimestamp="2025-12-05 12:22:18 +0000 UTC" firstStartedPulling="2025-12-05 12:22:19.740817471 +0000 UTC m=+785.325139801" lastFinishedPulling="2025-12-05 12:22:22.002247675 +0000 UTC m=+787.586570005" observedRunningTime="2025-12-05 12:22:22.445238977 +0000 UTC m=+788.029561307" watchObservedRunningTime="2025-12-05 12:22:22.448989861 +0000 UTC m=+788.033312191"
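The pod_startup_latency_tracker entries report two figures: podStartE2EDuration is observedRunningTime minus podCreationTimestamp (creation is only second-granular), while podStartSLOduration additionally excludes time spent pulling images. Reconstructed from the webhook record's own timestamps above, the arithmetic checks out exactly:

    pull window = lastFinishedPulling - firstStartedPulling
                = 12:22:22.002247675 - 12:22:19.740817471 = 2.261430204 s
    SLO         = E2E - pull window
                = 4.448989861 s - 2.261430204 s = 2.187559657 s  (= podStartSLOduration)

For the console pod earlier in this block, both pull timestamps are the zero value 0001-01-01, so nothing is subtracted and the SLO figure equals the 1.422598954 s end-to-end duration.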
lastFinishedPulling="2025-12-05 12:22:21.990845722 +0000 UTC m=+787.575168052" observedRunningTime="2025-12-05 12:22:22.477311493 +0000 UTC m=+788.061633823" watchObservedRunningTime="2025-12-05 12:22:22.480748637 +0000 UTC m=+788.065070967" Dec 05 12:22:22 crc kubenswrapper[4711]: I1205 12:22:22.495764 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4cxfd" podStartSLOduration=1.319308814 podStartE2EDuration="3.49574446s" podCreationTimestamp="2025-12-05 12:22:19 +0000 UTC" firstStartedPulling="2025-12-05 12:22:19.814840417 +0000 UTC m=+785.399162747" lastFinishedPulling="2025-12-05 12:22:21.991276063 +0000 UTC m=+787.575598393" observedRunningTime="2025-12-05 12:22:22.49253486 +0000 UTC m=+788.076857200" watchObservedRunningTime="2025-12-05 12:22:22.49574446 +0000 UTC m=+788.080066790" Dec 05 12:22:25 crc kubenswrapper[4711]: I1205 12:22:25.480460 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8" event={"ID":"c35b2a97-230a-483e-8321-fa0b7bd8c593","Type":"ContainerStarted","Data":"ed65437f4b22ef98b0b8fb38c03090b48e480ae85862193e27bf5d5575298a92"} Dec 05 12:22:25 crc kubenswrapper[4711]: I1205 12:22:25.496995 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-pnfb8" podStartSLOduration=2.27360159 podStartE2EDuration="7.496981183s" podCreationTimestamp="2025-12-05 12:22:18 +0000 UTC" firstStartedPulling="2025-12-05 12:22:19.520952071 +0000 UTC m=+785.105274401" lastFinishedPulling="2025-12-05 12:22:24.744331664 +0000 UTC m=+790.328653994" observedRunningTime="2025-12-05 12:22:25.495331372 +0000 UTC m=+791.079653702" watchObservedRunningTime="2025-12-05 12:22:25.496981183 +0000 UTC m=+791.081303513" Dec 05 12:22:29 crc kubenswrapper[4711]: I1205 12:22:29.268746 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-j7clc" Dec 05 12:22:29 crc kubenswrapper[4711]: I1205 12:22:29.573246 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:29 crc kubenswrapper[4711]: I1205 12:22:29.573338 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:29 crc kubenswrapper[4711]: I1205 12:22:29.578222 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:30 crc kubenswrapper[4711]: I1205 12:22:30.508591 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-75bb5b6bf-bvq25" Dec 05 12:22:30 crc kubenswrapper[4711]: I1205 12:22:30.552278 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-sr8f2"] Dec 05 12:22:39 crc kubenswrapper[4711]: I1205 12:22:39.210203 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tlcnm" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.788051 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mxrtx"] Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.790216 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.797203 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mxrtx"] Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.820923 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-utilities\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.820969 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx5n8\" (UniqueName: \"kubernetes.io/projected/9782613a-7068-488d-bae3-3b2908d56cb0-kube-api-access-dx5n8\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.821068 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-catalog-content\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.922238 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-catalog-content\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.922341 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-utilities\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.922367 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx5n8\" (UniqueName: \"kubernetes.io/projected/9782613a-7068-488d-bae3-3b2908d56cb0-kube-api-access-dx5n8\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.923006 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-catalog-content\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.923130 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-utilities\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:47 crc kubenswrapper[4711]: I1205 12:22:47.955634 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dx5n8\" (UniqueName: \"kubernetes.io/projected/9782613a-7068-488d-bae3-3b2908d56cb0-kube-api-access-dx5n8\") pod \"certified-operators-mxrtx\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:48 crc kubenswrapper[4711]: I1205 12:22:48.116455 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:48 crc kubenswrapper[4711]: I1205 12:22:48.426078 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mxrtx"] Dec 05 12:22:48 crc kubenswrapper[4711]: I1205 12:22:48.616248 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxrtx" event={"ID":"9782613a-7068-488d-bae3-3b2908d56cb0","Type":"ContainerStarted","Data":"ae6cb5fa82594e63a276b8c9a5f4f26bb89b969272094630ac52b0b3804aa1b5"} Dec 05 12:22:48 crc kubenswrapper[4711]: I1205 12:22:48.616774 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxrtx" event={"ID":"9782613a-7068-488d-bae3-3b2908d56cb0","Type":"ContainerStarted","Data":"fe51d62ca5257491c8d0d0dd38f25ef067f44a13fbdf02a92f102ee5aca61e86"} Dec 05 12:22:49 crc kubenswrapper[4711]: I1205 12:22:49.623841 4711 generic.go:334] "Generic (PLEG): container finished" podID="9782613a-7068-488d-bae3-3b2908d56cb0" containerID="ae6cb5fa82594e63a276b8c9a5f4f26bb89b969272094630ac52b0b3804aa1b5" exitCode=0 Dec 05 12:22:49 crc kubenswrapper[4711]: I1205 12:22:49.623934 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxrtx" event={"ID":"9782613a-7068-488d-bae3-3b2908d56cb0","Type":"ContainerDied","Data":"ae6cb5fa82594e63a276b8c9a5f4f26bb89b969272094630ac52b0b3804aa1b5"} Dec 05 12:22:50 crc kubenswrapper[4711]: I1205 12:22:50.633875 4711 generic.go:334] "Generic (PLEG): container finished" podID="9782613a-7068-488d-bae3-3b2908d56cb0" containerID="2af8857440bddded0cdf37d04a673e4ccf7188d343a1b7b7f1fe514a70103cb3" exitCode=0 Dec 05 12:22:50 crc kubenswrapper[4711]: I1205 12:22:50.634084 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxrtx" event={"ID":"9782613a-7068-488d-bae3-3b2908d56cb0","Type":"ContainerDied","Data":"2af8857440bddded0cdf37d04a673e4ccf7188d343a1b7b7f1fe514a70103cb3"} Dec 05 12:22:51 crc kubenswrapper[4711]: I1205 12:22:51.642333 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxrtx" event={"ID":"9782613a-7068-488d-bae3-3b2908d56cb0","Type":"ContainerStarted","Data":"90731d7280bda1570576766d70de9cd8b9aec494672fb00709a3ff23e2bbdb9b"} Dec 05 12:22:51 crc kubenswrapper[4711]: I1205 12:22:51.661817 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mxrtx" podStartSLOduration=2.183979478 podStartE2EDuration="4.661791306s" podCreationTimestamp="2025-12-05 12:22:47 +0000 UTC" firstStartedPulling="2025-12-05 12:22:48.618310255 +0000 UTC m=+814.202632585" lastFinishedPulling="2025-12-05 12:22:51.096122093 +0000 UTC m=+816.680444413" observedRunningTime="2025-12-05 12:22:51.656921515 +0000 UTC m=+817.241243865" watchObservedRunningTime="2025-12-05 12:22:51.661791306 +0000 UTC m=+817.246113646" Dec 05 12:22:53 crc kubenswrapper[4711]: I1205 12:22:53.786951 4711 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr"] Dec 05 12:22:53 crc kubenswrapper[4711]: I1205 12:22:53.789419 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:53 crc kubenswrapper[4711]: I1205 12:22:53.791546 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 12:22:53 crc kubenswrapper[4711]: I1205 12:22:53.799046 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr"] Dec 05 12:22:53 crc kubenswrapper[4711]: I1205 12:22:53.909499 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:53 crc kubenswrapper[4711]: I1205 12:22:53.909574 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:53 crc kubenswrapper[4711]: I1205 12:22:53.909630 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x749j\" (UniqueName: \"kubernetes.io/projected/0821eaca-5689-41bb-99e0-d717bcafc885-kube-api-access-x749j\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.011233 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.011309 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x749j\" (UniqueName: \"kubernetes.io/projected/0821eaca-5689-41bb-99e0-d717bcafc885-kube-api-access-x749j\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.011352 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " 
pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.011931 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.012117 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.030187 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x749j\" (UniqueName: \"kubernetes.io/projected/0821eaca-5689-41bb-99e0-d717bcafc885-kube-api-access-x749j\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.104910 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.308838 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr"] Dec 05 12:22:54 crc kubenswrapper[4711]: W1205 12:22:54.316022 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0821eaca_5689_41bb_99e0_d717bcafc885.slice/crio-c7f275eced55063a83d54545ec159fb42df59d68bf61443d325c1af589d77f54 WatchSource:0}: Error finding container c7f275eced55063a83d54545ec159fb42df59d68bf61443d325c1af589d77f54: Status 404 returned error can't find the container with id c7f275eced55063a83d54545ec159fb42df59d68bf61443d325c1af589d77f54 Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.662720 4711 generic.go:334] "Generic (PLEG): container finished" podID="0821eaca-5689-41bb-99e0-d717bcafc885" containerID="f8055421d915ded30ac5cb9c4b20260022ceb13c0adb2b916a9ef8b853976ac3" exitCode=0 Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.663015 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" event={"ID":"0821eaca-5689-41bb-99e0-d717bcafc885","Type":"ContainerDied","Data":"f8055421d915ded30ac5cb9c4b20260022ceb13c0adb2b916a9ef8b853976ac3"} Dec 05 12:22:54 crc kubenswrapper[4711]: I1205 12:22:54.663125 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" event={"ID":"0821eaca-5689-41bb-99e0-d717bcafc885","Type":"ContainerStarted","Data":"c7f275eced55063a83d54545ec159fb42df59d68bf61443d325c1af589d77f54"} Dec 05 12:22:55 crc kubenswrapper[4711]: I1205 12:22:55.591385 4711 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-console/console-f9d7485db-sr8f2" podUID="7f652b9a-5eb6-4066-84f7-dc1a7e09f038" containerName="console" containerID="cri-o://c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe" gracePeriod=15 Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.533827 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-sr8f2_7f652b9a-5eb6-4066-84f7-dc1a7e09f038/console/0.log" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.534500 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.643187 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-config\") pod \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.643309 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mspc\" (UniqueName: \"kubernetes.io/projected/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-kube-api-access-7mspc\") pod \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.643354 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-trusted-ca-bundle\") pod \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.643379 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-oauth-serving-cert\") pod \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.643413 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-service-ca\") pod \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644130 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "7f652b9a-5eb6-4066-84f7-dc1a7e09f038" (UID: "7f652b9a-5eb6-4066-84f7-dc1a7e09f038"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644141 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-service-ca" (OuterVolumeSpecName: "service-ca") pod "7f652b9a-5eb6-4066-84f7-dc1a7e09f038" (UID: "7f652b9a-5eb6-4066-84f7-dc1a7e09f038"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644154 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-config" (OuterVolumeSpecName: "console-config") pod "7f652b9a-5eb6-4066-84f7-dc1a7e09f038" (UID: "7f652b9a-5eb6-4066-84f7-dc1a7e09f038"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644168 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "7f652b9a-5eb6-4066-84f7-dc1a7e09f038" (UID: "7f652b9a-5eb6-4066-84f7-dc1a7e09f038"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644185 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-serving-cert\") pod \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644213 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-oauth-config\") pod \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\" (UID: \"7f652b9a-5eb6-4066-84f7-dc1a7e09f038\") " Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644443 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644454 4711 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644462 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.644470 4711 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.649693 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "7f652b9a-5eb6-4066-84f7-dc1a7e09f038" (UID: "7f652b9a-5eb6-4066-84f7-dc1a7e09f038"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.650252 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "7f652b9a-5eb6-4066-84f7-dc1a7e09f038" (UID: "7f652b9a-5eb6-4066-84f7-dc1a7e09f038"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.650747 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-kube-api-access-7mspc" (OuterVolumeSpecName: "kube-api-access-7mspc") pod "7f652b9a-5eb6-4066-84f7-dc1a7e09f038" (UID: "7f652b9a-5eb6-4066-84f7-dc1a7e09f038"). InnerVolumeSpecName "kube-api-access-7mspc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.681852 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-sr8f2_7f652b9a-5eb6-4066-84f7-dc1a7e09f038/console/0.log" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.681918 4711 generic.go:334] "Generic (PLEG): container finished" podID="7f652b9a-5eb6-4066-84f7-dc1a7e09f038" containerID="c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe" exitCode=2 Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.682216 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.682841 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sr8f2" event={"ID":"7f652b9a-5eb6-4066-84f7-dc1a7e09f038","Type":"ContainerDied","Data":"c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe"} Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.682940 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sr8f2" event={"ID":"7f652b9a-5eb6-4066-84f7-dc1a7e09f038","Type":"ContainerDied","Data":"ecb53aa652ff3c5b20bece632564f5a94d77f136177b68122942732272fce01a"} Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.682993 4711 scope.go:117] "RemoveContainer" containerID="c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.685879 4711 generic.go:334] "Generic (PLEG): container finished" podID="0821eaca-5689-41bb-99e0-d717bcafc885" containerID="916945152a4ff2c3dddbb4cc26a3f7004b9d7d8ab12bf571539de47a620a96f0" exitCode=0 Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.700881 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" event={"ID":"0821eaca-5689-41bb-99e0-d717bcafc885","Type":"ContainerDied","Data":"916945152a4ff2c3dddbb4cc26a3f7004b9d7d8ab12bf571539de47a620a96f0"} Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.705552 4711 scope.go:117] "RemoveContainer" containerID="c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe" Dec 05 12:22:56 crc kubenswrapper[4711]: E1205 12:22:56.705940 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe\": container with ID starting with 
c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe not found: ID does not exist" containerID="c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.705975 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe"} err="failed to get container status \"c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe\": rpc error: code = NotFound desc = could not find container \"c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe\": container with ID starting with c12fca455401bf8b87fe218b09965fca121421f3c0b81591ea609316740baefe not found: ID does not exist" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.745299 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mspc\" (UniqueName: \"kubernetes.io/projected/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-kube-api-access-7mspc\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.745334 4711 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:56 crc kubenswrapper[4711]: I1205 12:22:56.745347 4711 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7f652b9a-5eb6-4066-84f7-dc1a7e09f038-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:57 crc kubenswrapper[4711]: I1205 12:22:57.693992 4711 generic.go:334] "Generic (PLEG): container finished" podID="0821eaca-5689-41bb-99e0-d717bcafc885" containerID="cfa38e35d0ac4116701964be4f78f4d6f41a141ddc804ef4b4a86a75779f13a0" exitCode=0 Dec 05 12:22:57 crc kubenswrapper[4711]: I1205 12:22:57.694413 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" event={"ID":"0821eaca-5689-41bb-99e0-d717bcafc885","Type":"ContainerDied","Data":"cfa38e35d0ac4116701964be4f78f4d6f41a141ddc804ef4b4a86a75779f13a0"} Dec 05 12:22:58 crc kubenswrapper[4711]: I1205 12:22:58.116952 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:58 crc kubenswrapper[4711]: I1205 12:22:58.117279 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:58 crc kubenswrapper[4711]: I1205 12:22:58.184482 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:58 crc kubenswrapper[4711]: I1205 12:22:58.738314 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:22:58 crc kubenswrapper[4711]: I1205 12:22:58.932864 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.074985 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x749j\" (UniqueName: \"kubernetes.io/projected/0821eaca-5689-41bb-99e0-d717bcafc885-kube-api-access-x749j\") pod \"0821eaca-5689-41bb-99e0-d717bcafc885\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.075044 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-bundle\") pod \"0821eaca-5689-41bb-99e0-d717bcafc885\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.075070 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-util\") pod \"0821eaca-5689-41bb-99e0-d717bcafc885\" (UID: \"0821eaca-5689-41bb-99e0-d717bcafc885\") " Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.076258 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-bundle" (OuterVolumeSpecName: "bundle") pod "0821eaca-5689-41bb-99e0-d717bcafc885" (UID: "0821eaca-5689-41bb-99e0-d717bcafc885"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.081974 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0821eaca-5689-41bb-99e0-d717bcafc885-kube-api-access-x749j" (OuterVolumeSpecName: "kube-api-access-x749j") pod "0821eaca-5689-41bb-99e0-d717bcafc885" (UID: "0821eaca-5689-41bb-99e0-d717bcafc885"). InnerVolumeSpecName "kube-api-access-x749j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.094986 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-util" (OuterVolumeSpecName: "util") pod "0821eaca-5689-41bb-99e0-d717bcafc885" (UID: "0821eaca-5689-41bb-99e0-d717bcafc885"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.176691 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x749j\" (UniqueName: \"kubernetes.io/projected/0821eaca-5689-41bb-99e0-d717bcafc885-kube-api-access-x749j\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.176730 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.176742 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0821eaca-5689-41bb-99e0-d717bcafc885-util\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.708591 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" event={"ID":"0821eaca-5689-41bb-99e0-d717bcafc885","Type":"ContainerDied","Data":"c7f275eced55063a83d54545ec159fb42df59d68bf61443d325c1af589d77f54"} Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.708616 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr" Dec 05 12:22:59 crc kubenswrapper[4711]: I1205 12:22:59.708638 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7f275eced55063a83d54545ec159fb42df59d68bf61443d325c1af589d77f54" Dec 05 12:23:00 crc kubenswrapper[4711]: I1205 12:23:00.946942 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mxrtx"] Dec 05 12:23:00 crc kubenswrapper[4711]: I1205 12:23:00.947621 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mxrtx" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" containerName="registry-server" containerID="cri-o://90731d7280bda1570576766d70de9cd8b9aec494672fb00709a3ff23e2bbdb9b" gracePeriod=2 Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.719803 4711 generic.go:334] "Generic (PLEG): container finished" podID="9782613a-7068-488d-bae3-3b2908d56cb0" containerID="90731d7280bda1570576766d70de9cd8b9aec494672fb00709a3ff23e2bbdb9b" exitCode=0 Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.719853 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxrtx" event={"ID":"9782613a-7068-488d-bae3-3b2908d56cb0","Type":"ContainerDied","Data":"90731d7280bda1570576766d70de9cd8b9aec494672fb00709a3ff23e2bbdb9b"} Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.805463 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.913441 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-catalog-content\") pod \"9782613a-7068-488d-bae3-3b2908d56cb0\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.913530 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-utilities\") pod \"9782613a-7068-488d-bae3-3b2908d56cb0\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.913584 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx5n8\" (UniqueName: \"kubernetes.io/projected/9782613a-7068-488d-bae3-3b2908d56cb0-kube-api-access-dx5n8\") pod \"9782613a-7068-488d-bae3-3b2908d56cb0\" (UID: \"9782613a-7068-488d-bae3-3b2908d56cb0\") " Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.914474 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-utilities" (OuterVolumeSpecName: "utilities") pod "9782613a-7068-488d-bae3-3b2908d56cb0" (UID: "9782613a-7068-488d-bae3-3b2908d56cb0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.914621 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.918377 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9782613a-7068-488d-bae3-3b2908d56cb0-kube-api-access-dx5n8" (OuterVolumeSpecName: "kube-api-access-dx5n8") pod "9782613a-7068-488d-bae3-3b2908d56cb0" (UID: "9782613a-7068-488d-bae3-3b2908d56cb0"). InnerVolumeSpecName "kube-api-access-dx5n8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:23:01 crc kubenswrapper[4711]: I1205 12:23:01.963261 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9782613a-7068-488d-bae3-3b2908d56cb0" (UID: "9782613a-7068-488d-bae3-3b2908d56cb0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.014997 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx5n8\" (UniqueName: \"kubernetes.io/projected/9782613a-7068-488d-bae3-3b2908d56cb0-kube-api-access-dx5n8\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.015032 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9782613a-7068-488d-bae3-3b2908d56cb0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.729963 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxrtx" event={"ID":"9782613a-7068-488d-bae3-3b2908d56cb0","Type":"ContainerDied","Data":"fe51d62ca5257491c8d0d0dd38f25ef067f44a13fbdf02a92f102ee5aca61e86"} Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.730020 4711 scope.go:117] "RemoveContainer" containerID="90731d7280bda1570576766d70de9cd8b9aec494672fb00709a3ff23e2bbdb9b" Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.730160 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mxrtx" Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.758208 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mxrtx"] Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.762128 4711 scope.go:117] "RemoveContainer" containerID="2af8857440bddded0cdf37d04a673e4ccf7188d343a1b7b7f1fe514a70103cb3" Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.763438 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mxrtx"] Dec 05 12:23:02 crc kubenswrapper[4711]: I1205 12:23:02.785112 4711 scope.go:117] "RemoveContainer" containerID="ae6cb5fa82594e63a276b8c9a5f4f26bb89b969272094630ac52b0b3804aa1b5" Dec 05 12:23:04 crc kubenswrapper[4711]: I1205 12:23:04.702790 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" path="/var/lib/kubelet/pods/9782613a-7068-488d-bae3-3b2908d56cb0/volumes" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.028282 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k"] Dec 05 12:23:10 crc kubenswrapper[4711]: E1205 12:23:10.029135 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" containerName="registry-server" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029151 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" containerName="registry-server" Dec 05 12:23:10 crc kubenswrapper[4711]: E1205 12:23:10.029167 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" containerName="extract-utilities" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029176 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" containerName="extract-utilities" Dec 05 12:23:10 crc kubenswrapper[4711]: E1205 12:23:10.029192 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f652b9a-5eb6-4066-84f7-dc1a7e09f038" containerName="console" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029200 4711 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="7f652b9a-5eb6-4066-84f7-dc1a7e09f038" containerName="console" Dec 05 12:23:10 crc kubenswrapper[4711]: E1205 12:23:10.029212 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" containerName="extract-content" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029219 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" containerName="extract-content" Dec 05 12:23:10 crc kubenswrapper[4711]: E1205 12:23:10.029228 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0821eaca-5689-41bb-99e0-d717bcafc885" containerName="pull" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029235 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0821eaca-5689-41bb-99e0-d717bcafc885" containerName="pull" Dec 05 12:23:10 crc kubenswrapper[4711]: E1205 12:23:10.029250 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0821eaca-5689-41bb-99e0-d717bcafc885" containerName="util" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029258 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0821eaca-5689-41bb-99e0-d717bcafc885" containerName="util" Dec 05 12:23:10 crc kubenswrapper[4711]: E1205 12:23:10.029268 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0821eaca-5689-41bb-99e0-d717bcafc885" containerName="extract" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029275 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0821eaca-5689-41bb-99e0-d717bcafc885" containerName="extract" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029421 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f652b9a-5eb6-4066-84f7-dc1a7e09f038" containerName="console" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029530 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0821eaca-5689-41bb-99e0-d717bcafc885" containerName="extract" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.029546 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9782613a-7068-488d-bae3-3b2908d56cb0" containerName="registry-server" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.032541 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.054312 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.054431 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.054642 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.054731 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-dtz6f" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.054904 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.066876 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k"] Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.117119 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ec342f47-c728-4826-8af2-576ec13046bf-apiservice-cert\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.117246 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ec342f47-c728-4826-8af2-576ec13046bf-webhook-cert\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.117283 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtp2f\" (UniqueName: \"kubernetes.io/projected/ec342f47-c728-4826-8af2-576ec13046bf-kube-api-access-dtp2f\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.218018 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ec342f47-c728-4826-8af2-576ec13046bf-webhook-cert\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.218077 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtp2f\" (UniqueName: \"kubernetes.io/projected/ec342f47-c728-4826-8af2-576ec13046bf-kube-api-access-dtp2f\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.218100 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ec342f47-c728-4826-8af2-576ec13046bf-apiservice-cert\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.227191 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ec342f47-c728-4826-8af2-576ec13046bf-webhook-cert\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.229112 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ec342f47-c728-4826-8af2-576ec13046bf-apiservice-cert\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.243409 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtp2f\" (UniqueName: \"kubernetes.io/projected/ec342f47-c728-4826-8af2-576ec13046bf-kube-api-access-dtp2f\") pod \"metallb-operator-controller-manager-7d9fb8bd49-lwf8k\" (UID: \"ec342f47-c728-4826-8af2-576ec13046bf\") " pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.262220 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls"] Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.262939 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.264856 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.265007 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.264966 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-64qhj" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.273642 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls"] Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.374533 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.420861 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c3c5cf0-56f2-407a-99dc-c8abbc298527-apiservice-cert\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.420925 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c3c5cf0-56f2-407a-99dc-c8abbc298527-webhook-cert\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.420949 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4psz\" (UniqueName: \"kubernetes.io/projected/8c3c5cf0-56f2-407a-99dc-c8abbc298527-kube-api-access-n4psz\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.522356 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c3c5cf0-56f2-407a-99dc-c8abbc298527-apiservice-cert\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.522695 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c3c5cf0-56f2-407a-99dc-c8abbc298527-webhook-cert\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.522713 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4psz\" (UniqueName: \"kubernetes.io/projected/8c3c5cf0-56f2-407a-99dc-c8abbc298527-kube-api-access-n4psz\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.535518 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c3c5cf0-56f2-407a-99dc-c8abbc298527-apiservice-cert\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.535977 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c3c5cf0-56f2-407a-99dc-c8abbc298527-webhook-cert\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " 
pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.540189 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4psz\" (UniqueName: \"kubernetes.io/projected/8c3c5cf0-56f2-407a-99dc-c8abbc298527-kube-api-access-n4psz\") pod \"metallb-operator-webhook-server-545d776cd8-hwnls\" (UID: \"8c3c5cf0-56f2-407a-99dc-c8abbc298527\") " pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.587087 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.796903 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls"] Dec 05 12:23:10 crc kubenswrapper[4711]: W1205 12:23:10.805997 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c3c5cf0_56f2_407a_99dc_c8abbc298527.slice/crio-cf1b3365c0392145079ce75fd791248632a648a926949fce2a0b6cb51ec27305 WatchSource:0}: Error finding container cf1b3365c0392145079ce75fd791248632a648a926949fce2a0b6cb51ec27305: Status 404 returned error can't find the container with id cf1b3365c0392145079ce75fd791248632a648a926949fce2a0b6cb51ec27305 Dec 05 12:23:10 crc kubenswrapper[4711]: I1205 12:23:10.839469 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k"] Dec 05 12:23:11 crc kubenswrapper[4711]: I1205 12:23:11.786593 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" event={"ID":"ec342f47-c728-4826-8af2-576ec13046bf","Type":"ContainerStarted","Data":"6d2c9dea10c587bd6c481f24a36a2571e2dc7326e54a4c72951a7a5886d91f82"} Dec 05 12:23:11 crc kubenswrapper[4711]: I1205 12:23:11.788488 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" event={"ID":"8c3c5cf0-56f2-407a-99dc-c8abbc298527","Type":"ContainerStarted","Data":"cf1b3365c0392145079ce75fd791248632a648a926949fce2a0b6cb51ec27305"} Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.265874 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fjtd8"] Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.267364 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.282534 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fjtd8"] Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.298140 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-utilities\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.298203 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-catalog-content\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.298259 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrq5t\" (UniqueName: \"kubernetes.io/projected/bcdffc57-2f05-4da7-973a-ad6a2ae45090-kube-api-access-rrq5t\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.399246 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-utilities\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.399326 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-catalog-content\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.399413 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrq5t\" (UniqueName: \"kubernetes.io/projected/bcdffc57-2f05-4da7-973a-ad6a2ae45090-kube-api-access-rrq5t\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.400164 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-utilities\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.400256 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-catalog-content\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.438216 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rrq5t\" (UniqueName: \"kubernetes.io/projected/bcdffc57-2f05-4da7-973a-ad6a2ae45090-kube-api-access-rrq5t\") pod \"community-operators-fjtd8\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:15 crc kubenswrapper[4711]: I1205 12:23:15.632281 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:16 crc kubenswrapper[4711]: I1205 12:23:16.844989 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" event={"ID":"8c3c5cf0-56f2-407a-99dc-c8abbc298527","Type":"ContainerStarted","Data":"a912eefda4fe86e58fd80507a7a5265299e33f43879340f886ca837a053665b9"} Dec 05 12:23:16 crc kubenswrapper[4711]: I1205 12:23:16.846220 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:16 crc kubenswrapper[4711]: I1205 12:23:16.876531 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" podStartSLOduration=1.155643138 podStartE2EDuration="6.876510775s" podCreationTimestamp="2025-12-05 12:23:10 +0000 UTC" firstStartedPulling="2025-12-05 12:23:10.813610689 +0000 UTC m=+836.397933019" lastFinishedPulling="2025-12-05 12:23:16.534478326 +0000 UTC m=+842.118800656" observedRunningTime="2025-12-05 12:23:16.870096246 +0000 UTC m=+842.454418576" watchObservedRunningTime="2025-12-05 12:23:16.876510775 +0000 UTC m=+842.460833105" Dec 05 12:23:17 crc kubenswrapper[4711]: I1205 12:23:17.067697 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fjtd8"] Dec 05 12:23:17 crc kubenswrapper[4711]: I1205 12:23:17.855053 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" event={"ID":"ec342f47-c728-4826-8af2-576ec13046bf","Type":"ContainerStarted","Data":"eb279ae27b06853996c03cf27d3548c8308a1efa654c9b3fa569e9f33b57380f"} Dec 05 12:23:17 crc kubenswrapper[4711]: I1205 12:23:17.856246 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:17 crc kubenswrapper[4711]: I1205 12:23:17.858707 4711 generic.go:334] "Generic (PLEG): container finished" podID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerID="7b275127e9d22d152d93ca4965e2526042ad29d9b2bb7af347a49bd8de0236e6" exitCode=0 Dec 05 12:23:17 crc kubenswrapper[4711]: I1205 12:23:17.859965 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjtd8" event={"ID":"bcdffc57-2f05-4da7-973a-ad6a2ae45090","Type":"ContainerDied","Data":"7b275127e9d22d152d93ca4965e2526042ad29d9b2bb7af347a49bd8de0236e6"} Dec 05 12:23:17 crc kubenswrapper[4711]: I1205 12:23:17.859996 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjtd8" event={"ID":"bcdffc57-2f05-4da7-973a-ad6a2ae45090","Type":"ContainerStarted","Data":"777cdb7e9cfb354169d7153c1e344d8d748492b52229b0aacccaaf93ec18a013"} Dec 05 12:23:17 crc kubenswrapper[4711]: I1205 12:23:17.883237 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" podStartSLOduration=2.211256048 
podStartE2EDuration="7.883214692s" podCreationTimestamp="2025-12-05 12:23:10 +0000 UTC" firstStartedPulling="2025-12-05 12:23:10.851019387 +0000 UTC m=+836.435341717" lastFinishedPulling="2025-12-05 12:23:16.522978031 +0000 UTC m=+842.107300361" observedRunningTime="2025-12-05 12:23:17.883076159 +0000 UTC m=+843.467398539" watchObservedRunningTime="2025-12-05 12:23:17.883214692 +0000 UTC m=+843.467537042" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.355687 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rpfct"] Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.357495 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.375659 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpfct"] Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.446471 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc47d\" (UniqueName: \"kubernetes.io/projected/3120e6e2-d38a-4702-b7c8-0fbf90194b00-kube-api-access-vc47d\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.446543 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-utilities\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.446584 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-catalog-content\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.547273 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc47d\" (UniqueName: \"kubernetes.io/projected/3120e6e2-d38a-4702-b7c8-0fbf90194b00-kube-api-access-vc47d\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.547347 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-utilities\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.547402 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-catalog-content\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.547927 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-utilities\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.547945 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-catalog-content\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.575343 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc47d\" (UniqueName: \"kubernetes.io/projected/3120e6e2-d38a-4702-b7c8-0fbf90194b00-kube-api-access-vc47d\") pod \"redhat-marketplace-rpfct\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.676260 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.874459 4711 generic.go:334] "Generic (PLEG): container finished" podID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerID="0bca0f5b010b985fbbc2b74998a93a323d1e061fb8f7533ece5b0f75defc130c" exitCode=0 Dec 05 12:23:18 crc kubenswrapper[4711]: I1205 12:23:18.876168 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjtd8" event={"ID":"bcdffc57-2f05-4da7-973a-ad6a2ae45090","Type":"ContainerDied","Data":"0bca0f5b010b985fbbc2b74998a93a323d1e061fb8f7533ece5b0f75defc130c"} Dec 05 12:23:19 crc kubenswrapper[4711]: I1205 12:23:19.170774 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpfct"] Dec 05 12:23:19 crc kubenswrapper[4711]: W1205 12:23:19.175467 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3120e6e2_d38a_4702_b7c8_0fbf90194b00.slice/crio-8af3138f80434718f9211ee267ac80f5e9309162fc0d6a78092dc35fa4a26240 WatchSource:0}: Error finding container 8af3138f80434718f9211ee267ac80f5e9309162fc0d6a78092dc35fa4a26240: Status 404 returned error can't find the container with id 8af3138f80434718f9211ee267ac80f5e9309162fc0d6a78092dc35fa4a26240 Dec 05 12:23:19 crc kubenswrapper[4711]: I1205 12:23:19.883897 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpfct" event={"ID":"3120e6e2-d38a-4702-b7c8-0fbf90194b00","Type":"ContainerStarted","Data":"8af3138f80434718f9211ee267ac80f5e9309162fc0d6a78092dc35fa4a26240"} Dec 05 12:23:20 crc kubenswrapper[4711]: E1205 12:23:20.262632 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3120e6e2_d38a_4702_b7c8_0fbf90194b00.slice/crio-conmon-8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610.scope\": RecentStats: unable to find data in memory cache]" Dec 05 12:23:20 crc kubenswrapper[4711]: I1205 12:23:20.890708 4711 generic.go:334] "Generic (PLEG): container finished" podID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerID="8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610" exitCode=0 Dec 05 12:23:20 crc kubenswrapper[4711]: I1205 12:23:20.890809 4711 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpfct" event={"ID":"3120e6e2-d38a-4702-b7c8-0fbf90194b00","Type":"ContainerDied","Data":"8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610"} Dec 05 12:23:20 crc kubenswrapper[4711]: I1205 12:23:20.896080 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjtd8" event={"ID":"bcdffc57-2f05-4da7-973a-ad6a2ae45090","Type":"ContainerStarted","Data":"09b1ac4e38ae04177ac56c09b0ed27081011e9c18b5f500a3a9520144fad932c"} Dec 05 12:23:20 crc kubenswrapper[4711]: I1205 12:23:20.931095 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fjtd8" podStartSLOduration=3.567088219 podStartE2EDuration="5.931078451s" podCreationTimestamp="2025-12-05 12:23:15 +0000 UTC" firstStartedPulling="2025-12-05 12:23:17.860752965 +0000 UTC m=+843.445075295" lastFinishedPulling="2025-12-05 12:23:20.224743197 +0000 UTC m=+845.809065527" observedRunningTime="2025-12-05 12:23:20.926240223 +0000 UTC m=+846.510562563" watchObservedRunningTime="2025-12-05 12:23:20.931078451 +0000 UTC m=+846.515400781" Dec 05 12:23:22 crc kubenswrapper[4711]: I1205 12:23:22.914887 4711 generic.go:334] "Generic (PLEG): container finished" podID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerID="daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236" exitCode=0 Dec 05 12:23:22 crc kubenswrapper[4711]: I1205 12:23:22.914999 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpfct" event={"ID":"3120e6e2-d38a-4702-b7c8-0fbf90194b00","Type":"ContainerDied","Data":"daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236"} Dec 05 12:23:23 crc kubenswrapper[4711]: I1205 12:23:23.923509 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpfct" event={"ID":"3120e6e2-d38a-4702-b7c8-0fbf90194b00","Type":"ContainerStarted","Data":"dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19"} Dec 05 12:23:23 crc kubenswrapper[4711]: I1205 12:23:23.942136 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rpfct" podStartSLOduration=3.47855011 podStartE2EDuration="5.942119861s" podCreationTimestamp="2025-12-05 12:23:18 +0000 UTC" firstStartedPulling="2025-12-05 12:23:20.892331879 +0000 UTC m=+846.476654209" lastFinishedPulling="2025-12-05 12:23:23.35590163 +0000 UTC m=+848.940223960" observedRunningTime="2025-12-05 12:23:23.938112352 +0000 UTC m=+849.522434682" watchObservedRunningTime="2025-12-05 12:23:23.942119861 +0000 UTC m=+849.526442191" Dec 05 12:23:25 crc kubenswrapper[4711]: I1205 12:23:25.633098 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:25 crc kubenswrapper[4711]: I1205 12:23:25.633503 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:25 crc kubenswrapper[4711]: I1205 12:23:25.688409 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:25 crc kubenswrapper[4711]: I1205 12:23:25.973733 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:26 crc kubenswrapper[4711]: I1205 
12:23:26.706461 4711 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pod7f652b9a-5eb6-4066-84f7-dc1a7e09f038"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pod7f652b9a-5eb6-4066-84f7-dc1a7e09f038] : Timed out while waiting for systemd to remove kubepods-burstable-pod7f652b9a_5eb6_4066_84f7_dc1a7e09f038.slice" Dec 05 12:23:26 crc kubenswrapper[4711]: E1205 12:23:26.706561 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods burstable pod7f652b9a-5eb6-4066-84f7-dc1a7e09f038] : unable to destroy cgroup paths for cgroup [kubepods burstable pod7f652b9a-5eb6-4066-84f7-dc1a7e09f038] : Timed out while waiting for systemd to remove kubepods-burstable-pod7f652b9a_5eb6_4066_84f7_dc1a7e09f038.slice" pod="openshift-console/console-f9d7485db-sr8f2" podUID="7f652b9a-5eb6-4066-84f7-dc1a7e09f038" Dec 05 12:23:26 crc kubenswrapper[4711]: I1205 12:23:26.940006 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sr8f2" Dec 05 12:23:26 crc kubenswrapper[4711]: I1205 12:23:26.975711 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-sr8f2"] Dec 05 12:23:26 crc kubenswrapper[4711]: I1205 12:23:26.983711 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-sr8f2"] Dec 05 12:23:28 crc kubenswrapper[4711]: I1205 12:23:28.677021 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:28 crc kubenswrapper[4711]: I1205 12:23:28.677371 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:28 crc kubenswrapper[4711]: I1205 12:23:28.693182 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f652b9a-5eb6-4066-84f7-dc1a7e09f038" path="/var/lib/kubelet/pods/7f652b9a-5eb6-4066-84f7-dc1a7e09f038/volumes" Dec 05 12:23:28 crc kubenswrapper[4711]: I1205 12:23:28.725515 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:28 crc kubenswrapper[4711]: I1205 12:23:28.989544 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:29 crc kubenswrapper[4711]: I1205 12:23:29.347243 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fjtd8"] Dec 05 12:23:29 crc kubenswrapper[4711]: I1205 12:23:29.347818 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fjtd8" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerName="registry-server" containerID="cri-o://09b1ac4e38ae04177ac56c09b0ed27081011e9c18b5f500a3a9520144fad932c" gracePeriod=2 Dec 05 12:23:30 crc kubenswrapper[4711]: I1205 12:23:30.592021 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-545d776cd8-hwnls" Dec 05 12:23:30 crc kubenswrapper[4711]: I1205 12:23:30.747883 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpfct"] Dec 05 12:23:30 crc kubenswrapper[4711]: I1205 12:23:30.966608 4711 generic.go:334] "Generic (PLEG): container finished" podID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" 
containerID="09b1ac4e38ae04177ac56c09b0ed27081011e9c18b5f500a3a9520144fad932c" exitCode=0 Dec 05 12:23:30 crc kubenswrapper[4711]: I1205 12:23:30.966738 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjtd8" event={"ID":"bcdffc57-2f05-4da7-973a-ad6a2ae45090","Type":"ContainerDied","Data":"09b1ac4e38ae04177ac56c09b0ed27081011e9c18b5f500a3a9520144fad932c"} Dec 05 12:23:30 crc kubenswrapper[4711]: I1205 12:23:30.967498 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjtd8" event={"ID":"bcdffc57-2f05-4da7-973a-ad6a2ae45090","Type":"ContainerDied","Data":"777cdb7e9cfb354169d7153c1e344d8d748492b52229b0aacccaaf93ec18a013"} Dec 05 12:23:30 crc kubenswrapper[4711]: I1205 12:23:30.967537 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="777cdb7e9cfb354169d7153c1e344d8d748492b52229b0aacccaaf93ec18a013" Dec 05 12:23:30 crc kubenswrapper[4711]: I1205 12:23:30.967928 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rpfct" podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerName="registry-server" containerID="cri-o://dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19" gracePeriod=2 Dec 05 12:23:30 crc kubenswrapper[4711]: I1205 12:23:30.968348 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.119652 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-utilities\") pod \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.119737 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-catalog-content\") pod \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.119806 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrq5t\" (UniqueName: \"kubernetes.io/projected/bcdffc57-2f05-4da7-973a-ad6a2ae45090-kube-api-access-rrq5t\") pod \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\" (UID: \"bcdffc57-2f05-4da7-973a-ad6a2ae45090\") " Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.121365 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-utilities" (OuterVolumeSpecName: "utilities") pod "bcdffc57-2f05-4da7-973a-ad6a2ae45090" (UID: "bcdffc57-2f05-4da7-973a-ad6a2ae45090"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.126616 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcdffc57-2f05-4da7-973a-ad6a2ae45090-kube-api-access-rrq5t" (OuterVolumeSpecName: "kube-api-access-rrq5t") pod "bcdffc57-2f05-4da7-973a-ad6a2ae45090" (UID: "bcdffc57-2f05-4da7-973a-ad6a2ae45090"). InnerVolumeSpecName "kube-api-access-rrq5t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.198967 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bcdffc57-2f05-4da7-973a-ad6a2ae45090" (UID: "bcdffc57-2f05-4da7-973a-ad6a2ae45090"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.222122 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrq5t\" (UniqueName: \"kubernetes.io/projected/bcdffc57-2f05-4da7-973a-ad6a2ae45090-kube-api-access-rrq5t\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.222193 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.222215 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcdffc57-2f05-4da7-973a-ad6a2ae45090-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.346864 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.527257 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc47d\" (UniqueName: \"kubernetes.io/projected/3120e6e2-d38a-4702-b7c8-0fbf90194b00-kube-api-access-vc47d\") pod \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.527382 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-catalog-content\") pod \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.527447 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-utilities\") pod \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\" (UID: \"3120e6e2-d38a-4702-b7c8-0fbf90194b00\") " Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.528491 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-utilities" (OuterVolumeSpecName: "utilities") pod "3120e6e2-d38a-4702-b7c8-0fbf90194b00" (UID: "3120e6e2-d38a-4702-b7c8-0fbf90194b00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.532556 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3120e6e2-d38a-4702-b7c8-0fbf90194b00-kube-api-access-vc47d" (OuterVolumeSpecName: "kube-api-access-vc47d") pod "3120e6e2-d38a-4702-b7c8-0fbf90194b00" (UID: "3120e6e2-d38a-4702-b7c8-0fbf90194b00"). InnerVolumeSpecName "kube-api-access-vc47d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.552046 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3120e6e2-d38a-4702-b7c8-0fbf90194b00" (UID: "3120e6e2-d38a-4702-b7c8-0fbf90194b00"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.628465 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc47d\" (UniqueName: \"kubernetes.io/projected/3120e6e2-d38a-4702-b7c8-0fbf90194b00-kube-api-access-vc47d\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.628500 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.628508 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3120e6e2-d38a-4702-b7c8-0fbf90194b00-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.977188 4711 generic.go:334] "Generic (PLEG): container finished" podID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerID="dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19" exitCode=0 Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.977230 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rpfct" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.977660 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjtd8" Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.977267 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpfct" event={"ID":"3120e6e2-d38a-4702-b7c8-0fbf90194b00","Type":"ContainerDied","Data":"dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19"} Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.977747 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpfct" event={"ID":"3120e6e2-d38a-4702-b7c8-0fbf90194b00","Type":"ContainerDied","Data":"8af3138f80434718f9211ee267ac80f5e9309162fc0d6a78092dc35fa4a26240"} Dec 05 12:23:31 crc kubenswrapper[4711]: I1205 12:23:31.977780 4711 scope.go:117] "RemoveContainer" containerID="dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.003790 4711 scope.go:117] "RemoveContainer" containerID="daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.039932 4711 scope.go:117] "RemoveContainer" containerID="8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.046820 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fjtd8"] Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.060890 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fjtd8"] Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.066890 4711 scope.go:117] "RemoveContainer" containerID="dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19" Dec 05 12:23:32 crc kubenswrapper[4711]: E1205 12:23:32.068791 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19\": container with ID starting with dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19 not found: ID does not exist" containerID="dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.068822 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19"} err="failed to get container status \"dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19\": rpc error: code = NotFound desc = could not find container \"dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19\": container with ID starting with dbfa2f01db3dc7ad8707532f324b8f855251d12679c8cd8ee3c762b496ce7c19 not found: ID does not exist" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.068845 4711 scope.go:117] "RemoveContainer" containerID="daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.068896 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpfct"] Dec 05 12:23:32 crc kubenswrapper[4711]: E1205 12:23:32.069273 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236\": container with ID starting with daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236 not 
found: ID does not exist" containerID="daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.069298 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236"} err="failed to get container status \"daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236\": rpc error: code = NotFound desc = could not find container \"daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236\": container with ID starting with daad2d1afb668e5d7e7cb0e11692d10ecefd2c9cbe5fc8665e00f79bb28a4236 not found: ID does not exist" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.069311 4711 scope.go:117] "RemoveContainer" containerID="8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610" Dec 05 12:23:32 crc kubenswrapper[4711]: E1205 12:23:32.069583 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610\": container with ID starting with 8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610 not found: ID does not exist" containerID="8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.069602 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610"} err="failed to get container status \"8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610\": rpc error: code = NotFound desc = could not find container \"8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610\": container with ID starting with 8f5767f046cc8946eadb7edfd467df505334b2cf6f407c3255e81e3d25a6d610 not found: ID does not exist" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.091361 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpfct"] Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.690658 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" path="/var/lib/kubelet/pods/3120e6e2-d38a-4702-b7c8-0fbf90194b00/volumes" Dec 05 12:23:32 crc kubenswrapper[4711]: I1205 12:23:32.692034 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" path="/var/lib/kubelet/pods/bcdffc57-2f05-4da7-973a-ad6a2ae45090/volumes" Dec 05 12:23:50 crc kubenswrapper[4711]: I1205 12:23:50.377285 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7d9fb8bd49-lwf8k" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.180655 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps"] Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.181292 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerName="extract-utilities" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.181317 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerName="extract-utilities" Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.181335 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerName="registry-server" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.181344 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerName="registry-server" Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.181365 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerName="extract-content" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.181374 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerName="extract-content" Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.181404 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerName="extract-utilities" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.181413 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerName="extract-utilities" Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.181428 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerName="registry-server" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.181437 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerName="registry-server" Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.181454 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerName="extract-content" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.181462 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerName="extract-content" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.181584 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcdffc57-2f05-4da7-973a-ad6a2ae45090" containerName="registry-server" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.181607 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3120e6e2-d38a-4702-b7c8-0fbf90194b00" containerName="registry-server" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.182106 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.184351 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-zzzrc" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.185103 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a5d2949f-9c51-4955-8782-150e18eca73b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-2wtps\" (UID: \"a5d2949f-9c51-4955-8782-150e18eca73b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.185195 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx2s2\" (UniqueName: \"kubernetes.io/projected/a5d2949f-9c51-4955-8782-150e18eca73b-kube-api-access-hx2s2\") pod \"frr-k8s-webhook-server-7fcb986d4-2wtps\" (UID: \"a5d2949f-9c51-4955-8782-150e18eca73b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.186035 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-rsfp8"] Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.186351 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.196917 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps"] Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.197117 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.201056 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.203127 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.288327 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a5d2949f-9c51-4955-8782-150e18eca73b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-2wtps\" (UID: \"a5d2949f-9c51-4955-8782-150e18eca73b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.288558 4711 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.288653 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a5d2949f-9c51-4955-8782-150e18eca73b-cert podName:a5d2949f-9c51-4955-8782-150e18eca73b nodeName:}" failed. No retries permitted until 2025-12-05 12:23:51.788624038 +0000 UTC m=+877.372946368 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a5d2949f-9c51-4955-8782-150e18eca73b-cert") pod "frr-k8s-webhook-server-7fcb986d4-2wtps" (UID: "a5d2949f-9c51-4955-8782-150e18eca73b") : secret "frr-k8s-webhook-server-cert" not found Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.288689 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx2s2\" (UniqueName: \"kubernetes.io/projected/a5d2949f-9c51-4955-8782-150e18eca73b-kube-api-access-hx2s2\") pod \"frr-k8s-webhook-server-7fcb986d4-2wtps\" (UID: \"a5d2949f-9c51-4955-8782-150e18eca73b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.290241 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-gfr4h"] Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.291662 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.294423 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.294561 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.294569 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-qkd92" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.301698 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-nhn9s"] Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.304268 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.305362 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.308806 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.316613 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-nhn9s"] Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.350710 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx2s2\" (UniqueName: \"kubernetes.io/projected/a5d2949f-9c51-4955-8782-150e18eca73b-kube-api-access-hx2s2\") pod \"frr-k8s-webhook-server-7fcb986d4-2wtps\" (UID: \"a5d2949f-9c51-4955-8782-150e18eca73b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391630 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-metrics-certs\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391738 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391770 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8wjl\" (UniqueName: \"kubernetes.io/projected/d5bb0d55-e7da-466b-be2d-59b2d74553fe-kube-api-access-q8wjl\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391802 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb8ks\" (UniqueName: \"kubernetes.io/projected/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-kube-api-access-hb8ks\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391823 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-metrics\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391839 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-metrics-certs\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391869 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-metrics-certs\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc 
kubenswrapper[4711]: I1205 12:23:51.391890 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-reloader\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391939 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-cert\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391964 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-sockets\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.391989 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-startup\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.392007 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkqpj\" (UniqueName: \"kubernetes.io/projected/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-kube-api-access-gkqpj\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.392031 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5bb0d55-e7da-466b-be2d-59b2d74553fe-metallb-excludel2\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.392050 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-conf\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493121 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8wjl\" (UniqueName: \"kubernetes.io/projected/d5bb0d55-e7da-466b-be2d-59b2d74553fe-kube-api-access-q8wjl\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493199 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb8ks\" (UniqueName: \"kubernetes.io/projected/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-kube-api-access-hb8ks\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493232 4711 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-metrics\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493248 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-metrics-certs\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493279 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-metrics-certs\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493298 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-reloader\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493320 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-cert\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493347 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-sockets\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493371 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-startup\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493418 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkqpj\" (UniqueName: \"kubernetes.io/projected/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-kube-api-access-gkqpj\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493446 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5bb0d55-e7da-466b-be2d-59b2d74553fe-metallb-excludel2\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493465 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-conf\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc 
kubenswrapper[4711]: I1205 12:23:51.493498 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-metrics-certs\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.493563 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.493714 4711 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 12:23:51 crc kubenswrapper[4711]: E1205 12:23:51.493799 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist podName:d5bb0d55-e7da-466b-be2d-59b2d74553fe nodeName:}" failed. No retries permitted until 2025-12-05 12:23:51.993770065 +0000 UTC m=+877.578092395 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist") pod "speaker-gfr4h" (UID: "d5bb0d55-e7da-466b-be2d-59b2d74553fe") : secret "metallb-memberlist" not found Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.494184 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-reloader\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.494311 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-sockets\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.494374 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-conf\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.494536 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5bb0d55-e7da-466b-be2d-59b2d74553fe-metallb-excludel2\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.494601 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-metrics\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.495547 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-frr-startup\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " 
pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.497487 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.498705 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-metrics-certs\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.503894 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-metrics-certs\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.506928 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-cert\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.509671 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8wjl\" (UniqueName: \"kubernetes.io/projected/d5bb0d55-e7da-466b-be2d-59b2d74553fe-kube-api-access-q8wjl\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.510181 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-metrics-certs\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.517297 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkqpj\" (UniqueName: \"kubernetes.io/projected/30154788-eb0d-42a1-8f53-a1cb1b0cdb8f-kube-api-access-gkqpj\") pod \"controller-f8648f98b-nhn9s\" (UID: \"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f\") " pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.522487 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb8ks\" (UniqueName: \"kubernetes.io/projected/b02b43b5-3133-49d0-bbbb-3d6aec73c79e-kube-api-access-hb8ks\") pod \"frr-k8s-rsfp8\" (UID: \"b02b43b5-3133-49d0-bbbb-3d6aec73c79e\") " pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.542658 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.621824 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.799557 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a5d2949f-9c51-4955-8782-150e18eca73b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-2wtps\" (UID: \"a5d2949f-9c51-4955-8782-150e18eca73b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:51 crc kubenswrapper[4711]: I1205 12:23:51.830437 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a5d2949f-9c51-4955-8782-150e18eca73b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-2wtps\" (UID: \"a5d2949f-9c51-4955-8782-150e18eca73b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:52 crc kubenswrapper[4711]: I1205 12:23:52.001005 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:52 crc kubenswrapper[4711]: E1205 12:23:52.001195 4711 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 12:23:52 crc kubenswrapper[4711]: E1205 12:23:52.001280 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist podName:d5bb0d55-e7da-466b-be2d-59b2d74553fe nodeName:}" failed. No retries permitted until 2025-12-05 12:23:53.001260729 +0000 UTC m=+878.585583059 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist") pod "speaker-gfr4h" (UID: "d5bb0d55-e7da-466b-be2d-59b2d74553fe") : secret "metallb-memberlist" not found Dec 05 12:23:52 crc kubenswrapper[4711]: I1205 12:23:52.097643 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-nhn9s"] Dec 05 12:23:52 crc kubenswrapper[4711]: W1205 12:23:52.104083 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30154788_eb0d_42a1_8f53_a1cb1b0cdb8f.slice/crio-5f43a234deaf1ae5bc835c52442e9b2a9cc532c1130ec1cc2059407fd7c8dff2 WatchSource:0}: Error finding container 5f43a234deaf1ae5bc835c52442e9b2a9cc532c1130ec1cc2059407fd7c8dff2: Status 404 returned error can't find the container with id 5f43a234deaf1ae5bc835c52442e9b2a9cc532c1130ec1cc2059407fd7c8dff2 Dec 05 12:23:52 crc kubenswrapper[4711]: I1205 12:23:52.105832 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerStarted","Data":"039e6e60764b9370dae0b0116509f00cadaaf0245444564fb9b4903078afb709"} Dec 05 12:23:52 crc kubenswrapper[4711]: I1205 12:23:52.113141 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:23:52 crc kubenswrapper[4711]: I1205 12:23:52.518879 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps"] Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.012512 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.017940 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5bb0d55-e7da-466b-be2d-59b2d74553fe-memberlist\") pod \"speaker-gfr4h\" (UID: \"d5bb0d55-e7da-466b-be2d-59b2d74553fe\") " pod="metallb-system/speaker-gfr4h" Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.108774 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-gfr4h" Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.112729 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" event={"ID":"a5d2949f-9c51-4955-8782-150e18eca73b","Type":"ContainerStarted","Data":"a18adc8c38e31c25200608a963bba29d7d3d59982fe09e72c9e25873f9901243"} Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.114828 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-nhn9s" event={"ID":"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f","Type":"ContainerStarted","Data":"fb1c7b58e224dd25ac9f2714db3fcc7dcc4f93b37655364e2ccdb2e372a537e0"} Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.114876 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-nhn9s" event={"ID":"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f","Type":"ContainerStarted","Data":"0ecb5b9bf0a16b0ebef428b1a6ab2f4ebc607c0f61886058104c1a8904c8ac91"} Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.114886 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-nhn9s" event={"ID":"30154788-eb0d-42a1-8f53-a1cb1b0cdb8f","Type":"ContainerStarted","Data":"5f43a234deaf1ae5bc835c52442e9b2a9cc532c1130ec1cc2059407fd7c8dff2"} Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.114975 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:23:53 crc kubenswrapper[4711]: W1205 12:23:53.134538 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5bb0d55_e7da_466b_be2d_59b2d74553fe.slice/crio-4efb9a363d09e6b3b5bda6e19b6e417bc993fd19fd20f2cf37cddbddf0547fc5 WatchSource:0}: Error finding container 4efb9a363d09e6b3b5bda6e19b6e417bc993fd19fd20f2cf37cddbddf0547fc5: Status 404 returned error can't find the container with id 4efb9a363d09e6b3b5bda6e19b6e417bc993fd19fd20f2cf37cddbddf0547fc5 Dec 05 12:23:53 crc kubenswrapper[4711]: I1205 12:23:53.135667 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-nhn9s" podStartSLOduration=2.135652823 podStartE2EDuration="2.135652823s" podCreationTimestamp="2025-12-05 12:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 12:23:53.133211603 +0000 UTC m=+878.717533953" watchObservedRunningTime="2025-12-05 12:23:53.135652823 +0000 UTC m=+878.719975153" Dec 05 12:23:54 crc kubenswrapper[4711]: I1205 12:23:54.127200 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gfr4h" event={"ID":"d5bb0d55-e7da-466b-be2d-59b2d74553fe","Type":"ContainerStarted","Data":"c504b4558ad53f564ac9d2a742922fbc1aac687857323b2db9ad9fc97ea7a7e6"} Dec 05 12:23:54 crc kubenswrapper[4711]: I1205 12:23:54.127590 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gfr4h" event={"ID":"d5bb0d55-e7da-466b-be2d-59b2d74553fe","Type":"ContainerStarted","Data":"cb247c4701c846ebff368a59947841eabd1f7bcfadea7eb9944cef77551461ed"} Dec 05 12:23:54 crc kubenswrapper[4711]: I1205 12:23:54.127606 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gfr4h" event={"ID":"d5bb0d55-e7da-466b-be2d-59b2d74553fe","Type":"ContainerStarted","Data":"4efb9a363d09e6b3b5bda6e19b6e417bc993fd19fd20f2cf37cddbddf0547fc5"} Dec 05 12:23:54 crc kubenswrapper[4711]: I1205 12:23:54.127820 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-gfr4h" Dec 05 12:23:54 crc kubenswrapper[4711]: I1205 12:23:54.146749 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-gfr4h" podStartSLOduration=3.146727215 podStartE2EDuration="3.146727215s" podCreationTimestamp="2025-12-05 12:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:23:54.144940191 +0000 UTC m=+879.729262521" watchObservedRunningTime="2025-12-05 12:23:54.146727215 +0000 UTC m=+879.731049545" Dec 05 12:24:01 crc kubenswrapper[4711]: I1205 12:24:01.178000 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" event={"ID":"a5d2949f-9c51-4955-8782-150e18eca73b","Type":"ContainerStarted","Data":"35ce585965fe04fe46347eaa42ff0c8c67ce218fe57c8a2bd195b6e52e532e51"} Dec 05 12:24:01 crc kubenswrapper[4711]: I1205 12:24:01.178911 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:24:01 crc kubenswrapper[4711]: I1205 12:24:01.179633 4711 generic.go:334] "Generic (PLEG): container finished" podID="b02b43b5-3133-49d0-bbbb-3d6aec73c79e" containerID="e19b9b1b38177aded0c6ef2346fa7db18c69ffe01c1e6a3f668daedd094f6165" exitCode=0 Dec 05 12:24:01 crc kubenswrapper[4711]: I1205 12:24:01.179699 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerDied","Data":"e19b9b1b38177aded0c6ef2346fa7db18c69ffe01c1e6a3f668daedd094f6165"} Dec 05 12:24:01 crc kubenswrapper[4711]: I1205 12:24:01.202535 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" podStartSLOduration=2.697767221 podStartE2EDuration="10.202508333s" podCreationTimestamp="2025-12-05 12:23:51 +0000 UTC" firstStartedPulling="2025-12-05 12:23:52.537819887 +0000 UTC m=+878.122142217" lastFinishedPulling="2025-12-05 12:24:00.042561009 +0000 UTC m=+885.626883329" observedRunningTime="2025-12-05 12:24:01.194837474 +0000 UTC m=+886.779159804" watchObservedRunningTime="2025-12-05 12:24:01.202508333 +0000 UTC m=+886.786830673" Dec 05 12:24:02 crc 
kubenswrapper[4711]: I1205 12:24:02.187587 4711 generic.go:334] "Generic (PLEG): container finished" podID="b02b43b5-3133-49d0-bbbb-3d6aec73c79e" containerID="91a0b76de782fb5cca37a2a2555d369f5e114ee485a464d6b045e3eb618f1563" exitCode=0 Dec 05 12:24:02 crc kubenswrapper[4711]: I1205 12:24:02.187681 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerDied","Data":"91a0b76de782fb5cca37a2a2555d369f5e114ee485a464d6b045e3eb618f1563"} Dec 05 12:24:03 crc kubenswrapper[4711]: I1205 12:24:03.114799 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-gfr4h" Dec 05 12:24:03 crc kubenswrapper[4711]: I1205 12:24:03.195756 4711 generic.go:334] "Generic (PLEG): container finished" podID="b02b43b5-3133-49d0-bbbb-3d6aec73c79e" containerID="bd98bcbc3333fc4a830722aa061610a30e667652faace4ea709cdbb7176e7ad7" exitCode=0 Dec 05 12:24:03 crc kubenswrapper[4711]: I1205 12:24:03.195823 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerDied","Data":"bd98bcbc3333fc4a830722aa061610a30e667652faace4ea709cdbb7176e7ad7"} Dec 05 12:24:04 crc kubenswrapper[4711]: I1205 12:24:04.206248 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerStarted","Data":"44f6d6d9061a6024ca9319efd820fbf2e32cb5856c0258c7331a341472fa88d9"} Dec 05 12:24:04 crc kubenswrapper[4711]: I1205 12:24:04.206600 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerStarted","Data":"16bc0ef0be024b35002265ae7e5284d702cb87e823c902af7c8a7af678b7ae61"} Dec 05 12:24:04 crc kubenswrapper[4711]: I1205 12:24:04.206613 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerStarted","Data":"0ec0fe624b9042db3188fcb2bac0e78becd65aebb45970d2521dba9fd7cacd17"} Dec 05 12:24:04 crc kubenswrapper[4711]: I1205 12:24:04.206626 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerStarted","Data":"4a3409d2ea337ce2d9f2c955e6014730571a1b9ca9f4e85dbf99a58c4872c415"} Dec 05 12:24:04 crc kubenswrapper[4711]: I1205 12:24:04.206635 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerStarted","Data":"9d7c46f599b8015d5127f12eeabe3a1b8b7a95dd7f83a75bfa21b5aba7ddb079"} Dec 05 12:24:05 crc kubenswrapper[4711]: I1205 12:24:05.216895 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsfp8" event={"ID":"b02b43b5-3133-49d0-bbbb-3d6aec73c79e","Type":"ContainerStarted","Data":"10cf723d4739940dc6b8b436a79f0824688a076e1041fc04ca3d8eac261539ee"} Dec 05 12:24:05 crc kubenswrapper[4711]: I1205 12:24:05.217467 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:24:05 crc kubenswrapper[4711]: I1205 12:24:05.246694 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-rsfp8" podStartSLOduration=6.240378939 podStartE2EDuration="14.246674218s" podCreationTimestamp="2025-12-05 12:23:51 +0000 
UTC" firstStartedPulling="2025-12-05 12:23:52.030855356 +0000 UTC m=+877.615177686" lastFinishedPulling="2025-12-05 12:24:00.037150635 +0000 UTC m=+885.621472965" observedRunningTime="2025-12-05 12:24:05.240853835 +0000 UTC m=+890.825176165" watchObservedRunningTime="2025-12-05 12:24:05.246674218 +0000 UTC m=+890.830996548" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.215010 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-tr449"] Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.215960 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-tr449" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.218099 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-zxhr4" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.218679 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.219313 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.229321 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-tr449"] Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.299520 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2mwc\" (UniqueName: \"kubernetes.io/projected/8c64c923-29e7-4237-9bb2-e1efee3c01e1-kube-api-access-j2mwc\") pod \"openstack-operator-index-tr449\" (UID: \"8c64c923-29e7-4237-9bb2-e1efee3c01e1\") " pod="openstack-operators/openstack-operator-index-tr449" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.411765 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2mwc\" (UniqueName: \"kubernetes.io/projected/8c64c923-29e7-4237-9bb2-e1efee3c01e1-kube-api-access-j2mwc\") pod \"openstack-operator-index-tr449\" (UID: \"8c64c923-29e7-4237-9bb2-e1efee3c01e1\") " pod="openstack-operators/openstack-operator-index-tr449" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.429826 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2mwc\" (UniqueName: \"kubernetes.io/projected/8c64c923-29e7-4237-9bb2-e1efee3c01e1-kube-api-access-j2mwc\") pod \"openstack-operator-index-tr449\" (UID: \"8c64c923-29e7-4237-9bb2-e1efee3c01e1\") " pod="openstack-operators/openstack-operator-index-tr449" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.533496 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-tr449" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.543025 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.593728 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:24:06 crc kubenswrapper[4711]: I1205 12:24:06.997378 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-tr449"] Dec 05 12:24:07 crc kubenswrapper[4711]: W1205 12:24:07.013782 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c64c923_29e7_4237_9bb2_e1efee3c01e1.slice/crio-bfbb51093dd740e9e1cc08171cd125a7776b371b6bfad0cf3c5ecc585354b9a8 WatchSource:0}: Error finding container bfbb51093dd740e9e1cc08171cd125a7776b371b6bfad0cf3c5ecc585354b9a8: Status 404 returned error can't find the container with id bfbb51093dd740e9e1cc08171cd125a7776b371b6bfad0cf3c5ecc585354b9a8 Dec 05 12:24:07 crc kubenswrapper[4711]: I1205 12:24:07.229261 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-tr449" event={"ID":"8c64c923-29e7-4237-9bb2-e1efee3c01e1","Type":"ContainerStarted","Data":"bfbb51093dd740e9e1cc08171cd125a7776b371b6bfad0cf3c5ecc585354b9a8"} Dec 05 12:24:09 crc kubenswrapper[4711]: I1205 12:24:09.592114 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-tr449"] Dec 05 12:24:10 crc kubenswrapper[4711]: I1205 12:24:10.204075 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-4ktx2"] Dec 05 12:24:10 crc kubenswrapper[4711]: I1205 12:24:10.205160 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:10 crc kubenswrapper[4711]: I1205 12:24:10.222592 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4ktx2"] Dec 05 12:24:10 crc kubenswrapper[4711]: I1205 12:24:10.367510 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j84b4\" (UniqueName: \"kubernetes.io/projected/6de819ff-74b1-4645-a34c-434e3ac6eb60-kube-api-access-j84b4\") pod \"openstack-operator-index-4ktx2\" (UID: \"6de819ff-74b1-4645-a34c-434e3ac6eb60\") " pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:10 crc kubenswrapper[4711]: I1205 12:24:10.469185 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j84b4\" (UniqueName: \"kubernetes.io/projected/6de819ff-74b1-4645-a34c-434e3ac6eb60-kube-api-access-j84b4\") pod \"openstack-operator-index-4ktx2\" (UID: \"6de819ff-74b1-4645-a34c-434e3ac6eb60\") " pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:10 crc kubenswrapper[4711]: I1205 12:24:10.486836 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j84b4\" (UniqueName: \"kubernetes.io/projected/6de819ff-74b1-4645-a34c-434e3ac6eb60-kube-api-access-j84b4\") pod \"openstack-operator-index-4ktx2\" (UID: \"6de819ff-74b1-4645-a34c-434e3ac6eb60\") " pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:10 crc kubenswrapper[4711]: I1205 12:24:10.529746 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:11 crc kubenswrapper[4711]: I1205 12:24:11.626472 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-nhn9s" Dec 05 12:24:12 crc kubenswrapper[4711]: I1205 12:24:12.119630 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2wtps" Dec 05 12:24:12 crc kubenswrapper[4711]: I1205 12:24:12.537741 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4ktx2"] Dec 05 12:24:13 crc kubenswrapper[4711]: I1205 12:24:13.263829 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-tr449" event={"ID":"8c64c923-29e7-4237-9bb2-e1efee3c01e1","Type":"ContainerStarted","Data":"a205bd87213498d78e446e75fc5d2178cddc9c0a799d1e99512afa2d181ef4ac"} Dec 05 12:24:13 crc kubenswrapper[4711]: I1205 12:24:13.264008 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-tr449" podUID="8c64c923-29e7-4237-9bb2-e1efee3c01e1" containerName="registry-server" containerID="cri-o://a205bd87213498d78e446e75fc5d2178cddc9c0a799d1e99512afa2d181ef4ac" gracePeriod=2 Dec 05 12:24:13 crc kubenswrapper[4711]: I1205 12:24:13.266494 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4ktx2" event={"ID":"6de819ff-74b1-4645-a34c-434e3ac6eb60","Type":"ContainerStarted","Data":"c41febcda7599c4d57edb6f8284cc339dfc6e5d29b15a2ada5764457f844aa8c"} Dec 05 12:24:13 crc kubenswrapper[4711]: I1205 12:24:13.285175 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-tr449" podStartSLOduration=1.9538902820000001 
podStartE2EDuration="7.285154926s" podCreationTimestamp="2025-12-05 12:24:06 +0000 UTC" firstStartedPulling="2025-12-05 12:24:07.016252277 +0000 UTC m=+892.600574607" lastFinishedPulling="2025-12-05 12:24:12.347516931 +0000 UTC m=+897.931839251" observedRunningTime="2025-12-05 12:24:13.278275027 +0000 UTC m=+898.862597377" watchObservedRunningTime="2025-12-05 12:24:13.285154926 +0000 UTC m=+898.869477256" Dec 05 12:24:14 crc kubenswrapper[4711]: I1205 12:24:14.273755 4711 generic.go:334] "Generic (PLEG): container finished" podID="8c64c923-29e7-4237-9bb2-e1efee3c01e1" containerID="a205bd87213498d78e446e75fc5d2178cddc9c0a799d1e99512afa2d181ef4ac" exitCode=0 Dec 05 12:24:14 crc kubenswrapper[4711]: I1205 12:24:14.274142 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-tr449" event={"ID":"8c64c923-29e7-4237-9bb2-e1efee3c01e1","Type":"ContainerDied","Data":"a205bd87213498d78e446e75fc5d2178cddc9c0a799d1e99512afa2d181ef4ac"} Dec 05 12:24:14 crc kubenswrapper[4711]: I1205 12:24:14.275760 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4ktx2" event={"ID":"6de819ff-74b1-4645-a34c-434e3ac6eb60","Type":"ContainerStarted","Data":"75f519b4cce1b000a9686dd58da9d6a6287dda56ccb538634fde3dcb3ba134c8"} Dec 05 12:24:14 crc kubenswrapper[4711]: I1205 12:24:14.289948 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-4ktx2" podStartSLOduration=3.656290866 podStartE2EDuration="4.289932563s" podCreationTimestamp="2025-12-05 12:24:10 +0000 UTC" firstStartedPulling="2025-12-05 12:24:12.553057317 +0000 UTC m=+898.137379647" lastFinishedPulling="2025-12-05 12:24:13.186699014 +0000 UTC m=+898.771021344" observedRunningTime="2025-12-05 12:24:14.288313524 +0000 UTC m=+899.872635874" watchObservedRunningTime="2025-12-05 12:24:14.289932563 +0000 UTC m=+899.874254893" Dec 05 12:24:14 crc kubenswrapper[4711]: I1205 12:24:14.313403 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-tr449" Dec 05 12:24:14 crc kubenswrapper[4711]: I1205 12:24:14.422617 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2mwc\" (UniqueName: \"kubernetes.io/projected/8c64c923-29e7-4237-9bb2-e1efee3c01e1-kube-api-access-j2mwc\") pod \"8c64c923-29e7-4237-9bb2-e1efee3c01e1\" (UID: \"8c64c923-29e7-4237-9bb2-e1efee3c01e1\") " Dec 05 12:24:14 crc kubenswrapper[4711]: I1205 12:24:14.431226 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c64c923-29e7-4237-9bb2-e1efee3c01e1-kube-api-access-j2mwc" (OuterVolumeSpecName: "kube-api-access-j2mwc") pod "8c64c923-29e7-4237-9bb2-e1efee3c01e1" (UID: "8c64c923-29e7-4237-9bb2-e1efee3c01e1"). InnerVolumeSpecName "kube-api-access-j2mwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:24:14 crc kubenswrapper[4711]: I1205 12:24:14.524565 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2mwc\" (UniqueName: \"kubernetes.io/projected/8c64c923-29e7-4237-9bb2-e1efee3c01e1-kube-api-access-j2mwc\") on node \"crc\" DevicePath \"\"" Dec 05 12:24:15 crc kubenswrapper[4711]: I1205 12:24:15.284654 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-tr449" Dec 05 12:24:15 crc kubenswrapper[4711]: I1205 12:24:15.284653 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-tr449" event={"ID":"8c64c923-29e7-4237-9bb2-e1efee3c01e1","Type":"ContainerDied","Data":"bfbb51093dd740e9e1cc08171cd125a7776b371b6bfad0cf3c5ecc585354b9a8"} Dec 05 12:24:15 crc kubenswrapper[4711]: I1205 12:24:15.285220 4711 scope.go:117] "RemoveContainer" containerID="a205bd87213498d78e446e75fc5d2178cddc9c0a799d1e99512afa2d181ef4ac" Dec 05 12:24:15 crc kubenswrapper[4711]: I1205 12:24:15.309150 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-tr449"] Dec 05 12:24:15 crc kubenswrapper[4711]: I1205 12:24:15.314295 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-tr449"] Dec 05 12:24:16 crc kubenswrapper[4711]: I1205 12:24:16.690940 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c64c923-29e7-4237-9bb2-e1efee3c01e1" path="/var/lib/kubelet/pods/8c64c923-29e7-4237-9bb2-e1efee3c01e1/volumes" Dec 05 12:24:18 crc kubenswrapper[4711]: I1205 12:24:18.301160 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:24:18 crc kubenswrapper[4711]: I1205 12:24:18.301462 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:24:20 crc kubenswrapper[4711]: I1205 12:24:20.530777 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:20 crc kubenswrapper[4711]: I1205 12:24:20.531224 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:20 crc kubenswrapper[4711]: I1205 12:24:20.562028 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:21 crc kubenswrapper[4711]: I1205 12:24:21.348969 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-4ktx2" Dec 05 12:24:21 crc kubenswrapper[4711]: I1205 12:24:21.547184 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-rsfp8" Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.838435 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj"] Dec 05 12:24:27 crc kubenswrapper[4711]: E1205 12:24:27.839085 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c64c923-29e7-4237-9bb2-e1efee3c01e1" containerName="registry-server" Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.839100 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c64c923-29e7-4237-9bb2-e1efee3c01e1" containerName="registry-server" Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.839257 4711 
memory_manager.go:354] "RemoveStaleState removing state" podUID="8c64c923-29e7-4237-9bb2-e1efee3c01e1" containerName="registry-server" Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.840378 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.842486 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-89xcm" Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.846489 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj"] Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.899361 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb99d\" (UniqueName: \"kubernetes.io/projected/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-kube-api-access-zb99d\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.899475 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-util\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:27 crc kubenswrapper[4711]: I1205 12:24:27.899528 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-bundle\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:28 crc kubenswrapper[4711]: I1205 12:24:28.000541 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb99d\" (UniqueName: \"kubernetes.io/projected/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-kube-api-access-zb99d\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:28 crc kubenswrapper[4711]: I1205 12:24:28.000594 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-util\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:28 crc kubenswrapper[4711]: I1205 12:24:28.000638 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-bundle\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " 
pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:28 crc kubenswrapper[4711]: I1205 12:24:28.001657 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-bundle\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:28 crc kubenswrapper[4711]: I1205 12:24:28.002143 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-util\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:28 crc kubenswrapper[4711]: I1205 12:24:28.023466 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb99d\" (UniqueName: \"kubernetes.io/projected/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-kube-api-access-zb99d\") pod \"2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:28 crc kubenswrapper[4711]: I1205 12:24:28.155645 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:28 crc kubenswrapper[4711]: I1205 12:24:28.545538 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj"] Dec 05 12:24:29 crc kubenswrapper[4711]: I1205 12:24:29.367567 4711 generic.go:334] "Generic (PLEG): container finished" podID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerID="60f5e7b0d0802dcd23c9f29f72f51b9887bdcf98626488a5e12c151ce6c36115" exitCode=0 Dec 05 12:24:29 crc kubenswrapper[4711]: I1205 12:24:29.367612 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" event={"ID":"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26","Type":"ContainerDied","Data":"60f5e7b0d0802dcd23c9f29f72f51b9887bdcf98626488a5e12c151ce6c36115"} Dec 05 12:24:29 crc kubenswrapper[4711]: I1205 12:24:29.367639 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" event={"ID":"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26","Type":"ContainerStarted","Data":"f542fd50d1f61c5f9799f98ab12107605ed1443c04ea1becb01fd01af775e601"} Dec 05 12:24:30 crc kubenswrapper[4711]: I1205 12:24:30.375331 4711 generic.go:334] "Generic (PLEG): container finished" podID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerID="bbc8897c8d19c419be39dde0480ae915499a17baee7543bc2beadb2908d0dfc0" exitCode=0 Dec 05 12:24:30 crc kubenswrapper[4711]: I1205 12:24:30.375685 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" event={"ID":"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26","Type":"ContainerDied","Data":"bbc8897c8d19c419be39dde0480ae915499a17baee7543bc2beadb2908d0dfc0"} Dec 05 12:24:31 crc kubenswrapper[4711]: I1205 12:24:31.383105 4711 
generic.go:334] "Generic (PLEG): container finished" podID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerID="2087817120a42cc3ed178408b58afc26034ce1c890a2894916bb01384383af2f" exitCode=0 Dec 05 12:24:31 crc kubenswrapper[4711]: I1205 12:24:31.383273 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" event={"ID":"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26","Type":"ContainerDied","Data":"2087817120a42cc3ed178408b58afc26034ce1c890a2894916bb01384383af2f"} Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.686419 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.704428 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-bundle\") pod \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.704500 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-util\") pod \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.704588 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zb99d\" (UniqueName: \"kubernetes.io/projected/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-kube-api-access-zb99d\") pod \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\" (UID: \"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26\") " Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.705540 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-bundle" (OuterVolumeSpecName: "bundle") pod "9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" (UID: "9aa1bdf3-cb54-4fbc-8ae3-56015757ca26"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.715874 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-kube-api-access-zb99d" (OuterVolumeSpecName: "kube-api-access-zb99d") pod "9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" (UID: "9aa1bdf3-cb54-4fbc-8ae3-56015757ca26"). InnerVolumeSpecName "kube-api-access-zb99d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.721282 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-util" (OuterVolumeSpecName: "util") pod "9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" (UID: "9aa1bdf3-cb54-4fbc-8ae3-56015757ca26"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.806051 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-util\") on node \"crc\" DevicePath \"\"" Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.806462 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zb99d\" (UniqueName: \"kubernetes.io/projected/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-kube-api-access-zb99d\") on node \"crc\" DevicePath \"\"" Dec 05 12:24:32 crc kubenswrapper[4711]: I1205 12:24:32.806475 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aa1bdf3-cb54-4fbc-8ae3-56015757ca26-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:24:33 crc kubenswrapper[4711]: I1205 12:24:33.400613 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" event={"ID":"9aa1bdf3-cb54-4fbc-8ae3-56015757ca26","Type":"ContainerDied","Data":"f542fd50d1f61c5f9799f98ab12107605ed1443c04ea1becb01fd01af775e601"} Dec 05 12:24:33 crc kubenswrapper[4711]: I1205 12:24:33.400655 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f542fd50d1f61c5f9799f98ab12107605ed1443c04ea1becb01fd01af775e601" Dec 05 12:24:33 crc kubenswrapper[4711]: I1205 12:24:33.400700 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj" Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.927040 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq"] Dec 05 12:24:39 crc kubenswrapper[4711]: E1205 12:24:39.927572 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerName="util" Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.927585 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerName="util" Dec 05 12:24:39 crc kubenswrapper[4711]: E1205 12:24:39.927601 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerName="extract" Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.927608 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerName="extract" Dec 05 12:24:39 crc kubenswrapper[4711]: E1205 12:24:39.927619 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerName="pull" Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.927625 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerName="pull" Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.927739 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aa1bdf3-cb54-4fbc-8ae3-56015757ca26" containerName="extract" Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.928205 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.929943 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-rqqst" Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.961017 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq"] Dec 05 12:24:39 crc kubenswrapper[4711]: I1205 12:24:39.999818 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls5dl\" (UniqueName: \"kubernetes.io/projected/2adcea3c-2abd-421f-adf0-a0e30b5873fd-kube-api-access-ls5dl\") pod \"openstack-operator-controller-operator-574d9f8c97-k2tqq\" (UID: \"2adcea3c-2abd-421f-adf0-a0e30b5873fd\") " pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" Dec 05 12:24:40 crc kubenswrapper[4711]: I1205 12:24:40.101102 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls5dl\" (UniqueName: \"kubernetes.io/projected/2adcea3c-2abd-421f-adf0-a0e30b5873fd-kube-api-access-ls5dl\") pod \"openstack-operator-controller-operator-574d9f8c97-k2tqq\" (UID: \"2adcea3c-2abd-421f-adf0-a0e30b5873fd\") " pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" Dec 05 12:24:40 crc kubenswrapper[4711]: I1205 12:24:40.144468 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ls5dl\" (UniqueName: \"kubernetes.io/projected/2adcea3c-2abd-421f-adf0-a0e30b5873fd-kube-api-access-ls5dl\") pod \"openstack-operator-controller-operator-574d9f8c97-k2tqq\" (UID: \"2adcea3c-2abd-421f-adf0-a0e30b5873fd\") " pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" Dec 05 12:24:40 crc kubenswrapper[4711]: I1205 12:24:40.258275 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" Dec 05 12:24:40 crc kubenswrapper[4711]: I1205 12:24:40.552706 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq"] Dec 05 12:24:41 crc kubenswrapper[4711]: I1205 12:24:41.477400 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" event={"ID":"2adcea3c-2abd-421f-adf0-a0e30b5873fd","Type":"ContainerStarted","Data":"93d829df00aec9e7ce937b89f86f1701b8ccac94d5896123a182d064db8cdfba"} Dec 05 12:24:45 crc kubenswrapper[4711]: I1205 12:24:45.516950 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" event={"ID":"2adcea3c-2abd-421f-adf0-a0e30b5873fd","Type":"ContainerStarted","Data":"08028b031d3725fe7f2b8f951b15fb2d2f929f02b0204c3e10b443dbca77598f"} Dec 05 12:24:45 crc kubenswrapper[4711]: I1205 12:24:45.517426 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" Dec 05 12:24:45 crc kubenswrapper[4711]: I1205 12:24:45.553260 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" podStartSLOduration=2.744576232 podStartE2EDuration="6.553238092s" podCreationTimestamp="2025-12-05 12:24:39 +0000 UTC" firstStartedPulling="2025-12-05 12:24:40.577069522 +0000 UTC m=+926.161391852" lastFinishedPulling="2025-12-05 12:24:44.385731372 +0000 UTC m=+929.970053712" observedRunningTime="2025-12-05 12:24:45.548800832 +0000 UTC m=+931.133123182" watchObservedRunningTime="2025-12-05 12:24:45.553238092 +0000 UTC m=+931.137560462" Dec 05 12:24:48 crc kubenswrapper[4711]: I1205 12:24:48.300848 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:24:48 crc kubenswrapper[4711]: I1205 12:24:48.301503 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:24:50 crc kubenswrapper[4711]: I1205 12:24:50.261066 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-574d9f8c97-k2tqq" Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.301326 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.301947 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.301996 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.302595 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"472c49a169e1c65e73aac62e2dd2cc7781a63dc02785bf64789fa6376616fd5d"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.302647 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://472c49a169e1c65e73aac62e2dd2cc7781a63dc02785bf64789fa6376616fd5d" gracePeriod=600 Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.772058 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="472c49a169e1c65e73aac62e2dd2cc7781a63dc02785bf64789fa6376616fd5d" exitCode=0 Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.772375 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"472c49a169e1c65e73aac62e2dd2cc7781a63dc02785bf64789fa6376616fd5d"} Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.772421 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"165f762379e5d3244fd9c1d378fa831957f0f903eab36ae386a24b293865ce1b"} Dec 05 12:25:18 crc kubenswrapper[4711]: I1205 12:25:18.772438 4711 scope.go:117] "RemoveContainer" containerID="91470354b4a7faa05d09c834cdfee9e799a693b86070e148c7f8f23ec7c68fa3" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.310699 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.312526 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.314841 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-2kx5b" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.328982 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.329037 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.332333 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.334118 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-kzmwd" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.377095 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.403141 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.404708 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.409886 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-m747r" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.415355 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.416680 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.418938 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-q6cgt" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.426573 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.441606 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.442594 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.450772 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-fz2rb" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.473233 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6crkt\" (UniqueName: \"kubernetes.io/projected/76fdb364-88f3-4033-8318-353b66329f32-kube-api-access-6crkt\") pod \"barbican-operator-controller-manager-7d9dfd778-4vr8d\" (UID: \"76fdb364-88f3-4033-8318-353b66329f32\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.473334 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmdsv\" (UniqueName: \"kubernetes.io/projected/f6b616b6-23f5-4671-8d91-cc11317f07a6-kube-api-access-rmdsv\") pod \"cinder-operator-controller-manager-859b6ccc6-mdk4g\" (UID: \"f6b616b6-23f5-4671-8d91-cc11317f07a6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.514377 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.526878 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.527955 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.529340 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.532592 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-k65mc" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.539351 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.572378 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-8b94l"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.573354 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.574965 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmdsv\" (UniqueName: \"kubernetes.io/projected/f6b616b6-23f5-4671-8d91-cc11317f07a6-kube-api-access-rmdsv\") pod \"cinder-operator-controller-manager-859b6ccc6-mdk4g\" (UID: \"f6b616b6-23f5-4671-8d91-cc11317f07a6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.575011 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hvld\" (UniqueName: \"kubernetes.io/projected/bd9c2a02-45a9-444e-b026-18f6f632d157-kube-api-access-5hvld\") pod \"designate-operator-controller-manager-78b4bc895b-jxmwb\" (UID: \"bd9c2a02-45a9-444e-b026-18f6f632d157\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.575052 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88chz\" (UniqueName: \"kubernetes.io/projected/81f3007f-2841-4b35-b36d-7527ad69da4f-kube-api-access-88chz\") pod \"glance-operator-controller-manager-77987cd8cd-z9pxv\" (UID: \"81f3007f-2841-4b35-b36d-7527ad69da4f\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.575072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6crkt\" (UniqueName: \"kubernetes.io/projected/76fdb364-88f3-4033-8318-353b66329f32-kube-api-access-6crkt\") pod \"barbican-operator-controller-manager-7d9dfd778-4vr8d\" (UID: \"76fdb364-88f3-4033-8318-353b66329f32\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.575191 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tbpx\" (UniqueName: \"kubernetes.io/projected/61d6cab9-9cd8-443b-ba0f-90de0670366b-kube-api-access-9tbpx\") pod \"heat-operator-controller-manager-5f64f6f8bb-4nlqw\" (UID: \"61d6cab9-9cd8-443b-ba0f-90de0670366b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.576066 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.577000 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.585687 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.585844 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-kw8mx" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.586076 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-n2zvt" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.613824 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmdsv\" (UniqueName: \"kubernetes.io/projected/f6b616b6-23f5-4671-8d91-cc11317f07a6-kube-api-access-rmdsv\") pod \"cinder-operator-controller-manager-859b6ccc6-mdk4g\" (UID: \"f6b616b6-23f5-4671-8d91-cc11317f07a6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.614764 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-8b94l"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.620125 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6crkt\" (UniqueName: \"kubernetes.io/projected/76fdb364-88f3-4033-8318-353b66329f32-kube-api-access-6crkt\") pod \"barbican-operator-controller-manager-7d9dfd778-4vr8d\" (UID: \"76fdb364-88f3-4033-8318-353b66329f32\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.657634 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.672442 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.673823 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.675876 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88chz\" (UniqueName: \"kubernetes.io/projected/81f3007f-2841-4b35-b36d-7527ad69da4f-kube-api-access-88chz\") pod \"glance-operator-controller-manager-77987cd8cd-z9pxv\" (UID: \"81f3007f-2841-4b35-b36d-7527ad69da4f\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.675934 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7fcf\" (UniqueName: \"kubernetes.io/projected/0ee16c2f-bb8c-495c-a007-41444751c118-kube-api-access-q7fcf\") pod \"ironic-operator-controller-manager-6c548fd776-gpstg\" (UID: \"0ee16c2f-bb8c-495c-a007-41444751c118\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.675976 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.676000 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw7v8\" (UniqueName: \"kubernetes.io/projected/162ed24d-7f1a-43d3-a543-84a19891bcd0-kube-api-access-vw7v8\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.676029 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tbpx\" (UniqueName: \"kubernetes.io/projected/61d6cab9-9cd8-443b-ba0f-90de0670366b-kube-api-access-9tbpx\") pod \"heat-operator-controller-manager-5f64f6f8bb-4nlqw\" (UID: \"61d6cab9-9cd8-443b-ba0f-90de0670366b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.676098 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hvld\" (UniqueName: \"kubernetes.io/projected/bd9c2a02-45a9-444e-b026-18f6f632d157-kube-api-access-5hvld\") pod \"designate-operator-controller-manager-78b4bc895b-jxmwb\" (UID: \"bd9c2a02-45a9-444e-b026-18f6f632d157\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.676127 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghwfj\" (UniqueName: \"kubernetes.io/projected/1647c9ea-6d82-4f30-9641-25f10d54fbc6-kube-api-access-ghwfj\") pod \"horizon-operator-controller-manager-68c6d99b8f-wl765\" (UID: \"1647c9ea-6d82-4f30-9641-25f10d54fbc6\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.676957 4711 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-x4tnh" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.680609 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.680828 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.702857 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.725865 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.727017 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.727658 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.745547 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.746295 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tbpx\" (UniqueName: \"kubernetes.io/projected/61d6cab9-9cd8-443b-ba0f-90de0670366b-kube-api-access-9tbpx\") pod \"heat-operator-controller-manager-5f64f6f8bb-4nlqw\" (UID: \"61d6cab9-9cd8-443b-ba0f-90de0670366b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.746878 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.746966 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-2t8vc" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.756117 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-pfdwj" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.756544 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88chz\" (UniqueName: \"kubernetes.io/projected/81f3007f-2841-4b35-b36d-7527ad69da4f-kube-api-access-88chz\") pod \"glance-operator-controller-manager-77987cd8cd-z9pxv\" (UID: \"81f3007f-2841-4b35-b36d-7527ad69da4f\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.758076 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hvld\" (UniqueName: \"kubernetes.io/projected/bd9c2a02-45a9-444e-b026-18f6f632d157-kube-api-access-5hvld\") pod \"designate-operator-controller-manager-78b4bc895b-jxmwb\" (UID: \"bd9c2a02-45a9-444e-b026-18f6f632d157\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.763481 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.765201 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.769448 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.779734 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7fcf\" (UniqueName: \"kubernetes.io/projected/0ee16c2f-bb8c-495c-a007-41444751c118-kube-api-access-q7fcf\") pod \"ironic-operator-controller-manager-6c548fd776-gpstg\" (UID: \"0ee16c2f-bb8c-495c-a007-41444751c118\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.779790 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnzkd\" (UniqueName: \"kubernetes.io/projected/aef68c5a-5fdd-47dd-8205-8d39019e124e-kube-api-access-tnzkd\") pod \"keystone-operator-controller-manager-7765d96ddf-fs4fj\" (UID: \"aef68c5a-5fdd-47dd-8205-8d39019e124e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.779822 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.779843 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw7v8\" (UniqueName: \"kubernetes.io/projected/162ed24d-7f1a-43d3-a543-84a19891bcd0-kube-api-access-vw7v8\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.779928 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghwfj\" (UniqueName: \"kubernetes.io/projected/1647c9ea-6d82-4f30-9641-25f10d54fbc6-kube-api-access-ghwfj\") pod \"horizon-operator-controller-manager-68c6d99b8f-wl765\" (UID: \"1647c9ea-6d82-4f30-9641-25f10d54fbc6\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" Dec 05 12:25:29 crc kubenswrapper[4711]: E1205 12:25:29.780678 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:29 crc kubenswrapper[4711]: E1205 12:25:29.780721 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert podName:162ed24d-7f1a-43d3-a543-84a19891bcd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:30.28070477 +0000 UTC m=+975.865027100 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert") pod "infra-operator-controller-manager-57548d458d-8b94l" (UID: "162ed24d-7f1a-43d3-a543-84a19891bcd0") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.792150 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-p7bxq" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.805649 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.806647 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7fcf\" (UniqueName: \"kubernetes.io/projected/0ee16c2f-bb8c-495c-a007-41444751c118-kube-api-access-q7fcf\") pod \"ironic-operator-controller-manager-6c548fd776-gpstg\" (UID: \"0ee16c2f-bb8c-495c-a007-41444751c118\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.818335 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw7v8\" (UniqueName: \"kubernetes.io/projected/162ed24d-7f1a-43d3-a543-84a19891bcd0-kube-api-access-vw7v8\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.834906 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghwfj\" (UniqueName: \"kubernetes.io/projected/1647c9ea-6d82-4f30-9641-25f10d54fbc6-kube-api-access-ghwfj\") pod \"horizon-operator-controller-manager-68c6d99b8f-wl765\" (UID: \"1647c9ea-6d82-4f30-9641-25f10d54fbc6\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.864845 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.880127 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.881660 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.883464 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnzkd\" (UniqueName: \"kubernetes.io/projected/aef68c5a-5fdd-47dd-8205-8d39019e124e-kube-api-access-tnzkd\") pod \"keystone-operator-controller-manager-7765d96ddf-fs4fj\" (UID: \"aef68c5a-5fdd-47dd-8205-8d39019e124e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.883528 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj8vz\" (UniqueName: \"kubernetes.io/projected/2fbc7dde-da0b-48ab-9af3-0c023a9f446b-kube-api-access-kj8vz\") pod \"manila-operator-controller-manager-7c79b5df47-b2tj4\" (UID: \"2fbc7dde-da0b-48ab-9af3-0c023a9f446b\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.883579 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km2cw\" (UniqueName: \"kubernetes.io/projected/fd19ef58-a657-4733-bc5f-0917ea66ee3b-kube-api-access-km2cw\") pod \"mariadb-operator-controller-manager-56bbcc9d85-glz4l\" (UID: \"fd19ef58-a657-4733-bc5f-0917ea66ee3b\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.883623 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppxfx\" (UniqueName: \"kubernetes.io/projected/c5aa315b-0c36-4fba-a6a0-69cc18d6f21f-kube-api-access-ppxfx\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-87csw\" (UID: \"c5aa315b-0c36-4fba-a6a0-69cc18d6f21f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.887899 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-mbczg" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.895003 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.896721 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.909265 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-8bvrf" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.921867 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnzkd\" (UniqueName: \"kubernetes.io/projected/aef68c5a-5fdd-47dd-8205-8d39019e124e-kube-api-access-tnzkd\") pod \"keystone-operator-controller-manager-7765d96ddf-fs4fj\" (UID: \"aef68c5a-5fdd-47dd-8205-8d39019e124e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.929081 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw"] Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.977829 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.985212 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km2cw\" (UniqueName: \"kubernetes.io/projected/fd19ef58-a657-4733-bc5f-0917ea66ee3b-kube-api-access-km2cw\") pod \"mariadb-operator-controller-manager-56bbcc9d85-glz4l\" (UID: \"fd19ef58-a657-4733-bc5f-0917ea66ee3b\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.985303 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppxfx\" (UniqueName: \"kubernetes.io/projected/c5aa315b-0c36-4fba-a6a0-69cc18d6f21f-kube-api-access-ppxfx\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-87csw\" (UID: \"c5aa315b-0c36-4fba-a6a0-69cc18d6f21f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.985337 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q77kc\" (UniqueName: \"kubernetes.io/projected/c900de25-1ca3-4a0f-8485-c0e7d1b05f12-kube-api-access-q77kc\") pod \"octavia-operator-controller-manager-998648c74-mmwc9\" (UID: \"c900de25-1ca3-4a0f-8485-c0e7d1b05f12\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.985363 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w972m\" (UniqueName: \"kubernetes.io/projected/7aee5f79-d3e2-4f7b-9047-d2ca4a048c00-kube-api-access-w972m\") pod \"nova-operator-controller-manager-697bc559fc-fs7kh\" (UID: \"7aee5f79-d3e2-4f7b-9047-d2ca4a048c00\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" Dec 05 12:25:29 crc kubenswrapper[4711]: I1205 12:25:29.985427 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj8vz\" (UniqueName: \"kubernetes.io/projected/2fbc7dde-da0b-48ab-9af3-0c023a9f446b-kube-api-access-kj8vz\") pod \"manila-operator-controller-manager-7c79b5df47-b2tj4\" (UID: \"2fbc7dde-da0b-48ab-9af3-0c023a9f446b\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" Dec 05 12:25:30 crc kubenswrapper[4711]: 
I1205 12:25:30.021692 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppxfx\" (UniqueName: \"kubernetes.io/projected/c5aa315b-0c36-4fba-a6a0-69cc18d6f21f-kube-api-access-ppxfx\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-87csw\" (UID: \"c5aa315b-0c36-4fba-a6a0-69cc18d6f21f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.031308 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km2cw\" (UniqueName: \"kubernetes.io/projected/fd19ef58-a657-4733-bc5f-0917ea66ee3b-kube-api-access-km2cw\") pod \"mariadb-operator-controller-manager-56bbcc9d85-glz4l\" (UID: \"fd19ef58-a657-4733-bc5f-0917ea66ee3b\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.032029 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj8vz\" (UniqueName: \"kubernetes.io/projected/2fbc7dde-da0b-48ab-9af3-0c023a9f446b-kube-api-access-kj8vz\") pod \"manila-operator-controller-manager-7c79b5df47-b2tj4\" (UID: \"2fbc7dde-da0b-48ab-9af3-0c023a9f446b\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.037486 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.037960 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.039097 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.043970 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.045998 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.050793 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.051723 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-xc87x" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.079153 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.116018 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.117106 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.117137 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.119342 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-qqvmn" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.121987 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q77kc\" (UniqueName: \"kubernetes.io/projected/c900de25-1ca3-4a0f-8485-c0e7d1b05f12-kube-api-access-q77kc\") pod \"octavia-operator-controller-manager-998648c74-mmwc9\" (UID: \"c900de25-1ca3-4a0f-8485-c0e7d1b05f12\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.122011 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w972m\" (UniqueName: \"kubernetes.io/projected/7aee5f79-d3e2-4f7b-9047-d2ca4a048c00-kube-api-access-w972m\") pod \"nova-operator-controller-manager-697bc559fc-fs7kh\" (UID: \"7aee5f79-d3e2-4f7b-9047-d2ca4a048c00\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.135145 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.144082 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.145867 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.152074 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q77kc\" (UniqueName: \"kubernetes.io/projected/c900de25-1ca3-4a0f-8485-c0e7d1b05f12-kube-api-access-q77kc\") pod \"octavia-operator-controller-manager-998648c74-mmwc9\" (UID: \"c900de25-1ca3-4a0f-8485-c0e7d1b05f12\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.152518 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-8k9lp" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.152738 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.164710 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.178930 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.189525 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w972m\" (UniqueName: \"kubernetes.io/projected/7aee5f79-d3e2-4f7b-9047-d2ca4a048c00-kube-api-access-w972m\") pod \"nova-operator-controller-manager-697bc559fc-fs7kh\" (UID: \"7aee5f79-d3e2-4f7b-9047-d2ca4a048c00\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.225378 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.226621 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hbp4\" (UniqueName: \"kubernetes.io/projected/50fdc7e4-c9d8-487f-827b-5e087aebdcb0-kube-api-access-7hbp4\") pod \"placement-operator-controller-manager-78f8948974-sjdwh\" (UID: \"50fdc7e4-c9d8-487f-827b-5e087aebdcb0\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.226645 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sst26\" (UniqueName: \"kubernetes.io/projected/1b0cf425-4752-41b6-9a30-862e38015368-kube-api-access-sst26\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.226678 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.226741 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx9kp\" (UniqueName: \"kubernetes.io/projected/d036d428-1c3b-4a5c-b071-04c3270d8e0d-kube-api-access-sx9kp\") pod \"ovn-operator-controller-manager-b6456fdb6-vd4nf\" (UID: \"d036d428-1c3b-4a5c-b071-04c3270d8e0d\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.234187 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.240986 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.243875 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.245231 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.246073 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.253321 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-b7lbc" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.255114 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.257322 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.261010 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-dkw5x" Dec 05 12:25:30 crc kubenswrapper[4711]: W1205 12:25:30.296299 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6b616b6_23f5_4671_8d91_cc11317f07a6.slice/crio-1645d0a6ab4d264197c5ec7272a550749e8e21dc2fee4f771f421823ec2935b3 WatchSource:0}: Error finding container 1645d0a6ab4d264197c5ec7272a550749e8e21dc2fee4f771f421823ec2935b3: Status 404 returned error can't find the container with id 1645d0a6ab4d264197c5ec7272a550749e8e21dc2fee4f771f421823ec2935b3 Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.305786 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.318851 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.329610 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.329665 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx9kp\" (UniqueName: \"kubernetes.io/projected/d036d428-1c3b-4a5c-b071-04c3270d8e0d-kube-api-access-sx9kp\") pod \"ovn-operator-controller-manager-b6456fdb6-vd4nf\" (UID: \"d036d428-1c3b-4a5c-b071-04c3270d8e0d\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.329731 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt878\" (UniqueName: \"kubernetes.io/projected/52651fb6-d860-4d58-9084-8d2c8dc49529-kube-api-access-bt878\") pod \"telemetry-operator-controller-manager-76cc84c6bb-nzkbm\" (UID: \"52651fb6-d860-4d58-9084-8d2c8dc49529\") " 
pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.329755 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hbp4\" (UniqueName: \"kubernetes.io/projected/50fdc7e4-c9d8-487f-827b-5e087aebdcb0-kube-api-access-7hbp4\") pod \"placement-operator-controller-manager-78f8948974-sjdwh\" (UID: \"50fdc7e4-c9d8-487f-827b-5e087aebdcb0\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.329775 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sst26\" (UniqueName: \"kubernetes.io/projected/1b0cf425-4752-41b6-9a30-862e38015368-kube-api-access-sst26\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.329803 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.329821 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmhck\" (UniqueName: \"kubernetes.io/projected/1651f201-08ba-4dea-88ff-680d91d475d3-kube-api-access-tmhck\") pod \"swift-operator-controller-manager-5f8c65bbfc-c989k\" (UID: \"1651f201-08ba-4dea-88ff-680d91d475d3\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.329940 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.329983 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert podName:162ed24d-7f1a-43d3-a543-84a19891bcd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:31.329967496 +0000 UTC m=+976.914289816 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert") pod "infra-operator-controller-manager-57548d458d-8b94l" (UID: "162ed24d-7f1a-43d3-a543-84a19891bcd0") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.330667 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.330709 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert podName:1b0cf425-4752-41b6-9a30-862e38015368 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:30.830700564 +0000 UTC m=+976.415022894 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" (UID: "1b0cf425-4752-41b6-9a30-862e38015368") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.353978 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hbp4\" (UniqueName: \"kubernetes.io/projected/50fdc7e4-c9d8-487f-827b-5e087aebdcb0-kube-api-access-7hbp4\") pod \"placement-operator-controller-manager-78f8948974-sjdwh\" (UID: \"50fdc7e4-c9d8-487f-827b-5e087aebdcb0\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.359289 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.366655 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx9kp\" (UniqueName: \"kubernetes.io/projected/d036d428-1c3b-4a5c-b071-04c3270d8e0d-kube-api-access-sx9kp\") pod \"ovn-operator-controller-manager-b6456fdb6-vd4nf\" (UID: \"d036d428-1c3b-4a5c-b071-04c3270d8e0d\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.368137 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sst26\" (UniqueName: \"kubernetes.io/projected/1b0cf425-4752-41b6-9a30-862e38015368-kube-api-access-sst26\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.393569 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.394860 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.398725 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-qzwz5" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.412582 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.430515 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.431964 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.440329 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjfwz\" (UniqueName: \"kubernetes.io/projected/8bda4a64-08b2-4c48-b0c6-bfb094b7b985-kube-api-access-pjfwz\") pod \"test-operator-controller-manager-5854674fcc-vvnrf\" (UID: \"8bda4a64-08b2-4c48-b0c6-bfb094b7b985\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.440425 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt878\" (UniqueName: \"kubernetes.io/projected/52651fb6-d860-4d58-9084-8d2c8dc49529-kube-api-access-bt878\") pod \"telemetry-operator-controller-manager-76cc84c6bb-nzkbm\" (UID: \"52651fb6-d860-4d58-9084-8d2c8dc49529\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.440485 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmhck\" (UniqueName: \"kubernetes.io/projected/1651f201-08ba-4dea-88ff-680d91d475d3-kube-api-access-tmhck\") pod \"swift-operator-controller-manager-5f8c65bbfc-c989k\" (UID: \"1651f201-08ba-4dea-88ff-680d91d475d3\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.443257 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.452824 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-fbdhx" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.469974 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmhck\" (UniqueName: \"kubernetes.io/projected/1651f201-08ba-4dea-88ff-680d91d475d3-kube-api-access-tmhck\") pod \"swift-operator-controller-manager-5f8c65bbfc-c989k\" (UID: \"1651f201-08ba-4dea-88ff-680d91d475d3\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.470678 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt878\" (UniqueName: \"kubernetes.io/projected/52651fb6-d860-4d58-9084-8d2c8dc49529-kube-api-access-bt878\") pod \"telemetry-operator-controller-manager-76cc84c6bb-nzkbm\" (UID: \"52651fb6-d860-4d58-9084-8d2c8dc49529\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.473263 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.474454 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.475881 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.478212 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.478488 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-rjbnp" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.488152 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.496490 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4"] Dec 05 12:25:30 crc kubenswrapper[4711]: W1205 12:25:30.531732 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ee16c2f_bb8c_495c_a007_41444751c118.slice/crio-ac9547b470b5b40e0a858396e383f09b357cd80c25797395af0eb007d4770cfc WatchSource:0}: Error finding container ac9547b470b5b40e0a858396e383f09b357cd80c25797395af0eb007d4770cfc: Status 404 returned error can't find the container with id ac9547b470b5b40e0a858396e383f09b357cd80c25797395af0eb007d4770cfc Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.536492 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.541188 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.562680 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.562733 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7mml\" (UniqueName: \"kubernetes.io/projected/09a6ffec-d739-4b72-8b6b-83609ed4f571-kube-api-access-b7mml\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.562799 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbgqr\" (UniqueName: \"kubernetes.io/projected/85bb9f84-9769-4960-83a3-00f1a8fb9851-kube-api-access-fbgqr\") pod \"watcher-operator-controller-manager-6c9545865c-7z2lk\" (UID: \"85bb9f84-9769-4960-83a3-00f1a8fb9851\") " pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.562970 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.563076 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjfwz\" (UniqueName: \"kubernetes.io/projected/8bda4a64-08b2-4c48-b0c6-bfb094b7b985-kube-api-access-pjfwz\") pod \"test-operator-controller-manager-5854674fcc-vvnrf\" (UID: \"8bda4a64-08b2-4c48-b0c6-bfb094b7b985\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.577017 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.578024 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.580783 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-sr4sr" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.586103 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.586293 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjfwz\" (UniqueName: \"kubernetes.io/projected/8bda4a64-08b2-4c48-b0c6-bfb094b7b985-kube-api-access-pjfwz\") pod \"test-operator-controller-manager-5854674fcc-vvnrf\" (UID: \"8bda4a64-08b2-4c48-b0c6-bfb094b7b985\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.624244 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.624830 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.632854 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.664536 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.664708 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7mml\" (UniqueName: \"kubernetes.io/projected/09a6ffec-d739-4b72-8b6b-83609ed4f571-kube-api-access-b7mml\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.664813 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbgqr\" (UniqueName: \"kubernetes.io/projected/85bb9f84-9769-4960-83a3-00f1a8fb9851-kube-api-access-fbgqr\") pod \"watcher-operator-controller-manager-6c9545865c-7z2lk\" (UID: \"85bb9f84-9769-4960-83a3-00f1a8fb9851\") " pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.664926 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s5g4\" (UniqueName: \"kubernetes.io/projected/e1047891-b091-4202-a4fc-38abe49ced8c-kube-api-access-2s5g4\") pod \"rabbitmq-cluster-operator-manager-668c99d594-tj9mg\" (UID: \"e1047891-b091-4202-a4fc-38abe49ced8c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.665008 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.665174 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.665686 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:31.165377477 +0000 UTC m=+976.749699837 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "webhook-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.666001 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.666090 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:31.166078775 +0000 UTC m=+976.750401105 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "metrics-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.669095 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.692203 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7mml\" (UniqueName: \"kubernetes.io/projected/09a6ffec-d739-4b72-8b6b-83609ed4f571-kube-api-access-b7mml\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.697907 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbgqr\" (UniqueName: \"kubernetes.io/projected/85bb9f84-9769-4960-83a3-00f1a8fb9851-kube-api-access-fbgqr\") pod \"watcher-operator-controller-manager-6c9545865c-7z2lk\" (UID: \"85bb9f84-9769-4960-83a3-00f1a8fb9851\") " pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" Dec 05 12:25:30 crc kubenswrapper[4711]: W1205 12:25:30.738372 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61d6cab9_9cd8_443b_ba0f_90de0670366b.slice/crio-ff2d22490c306718ac18e817ae175a16c3834f5267c55d5c1f36cfd802e0b838 WatchSource:0}: Error finding container ff2d22490c306718ac18e817ae175a16c3834f5267c55d5c1f36cfd802e0b838: Status 404 returned error can't find the container with id ff2d22490c306718ac18e817ae175a16c3834f5267c55d5c1f36cfd802e0b838 Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.741562 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw"] Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.767206 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s5g4\" (UniqueName: \"kubernetes.io/projected/e1047891-b091-4202-a4fc-38abe49ced8c-kube-api-access-2s5g4\") pod \"rabbitmq-cluster-operator-manager-668c99d594-tj9mg\" (UID: \"e1047891-b091-4202-a4fc-38abe49ced8c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.774080 4711 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.793947 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s5g4\" (UniqueName: \"kubernetes.io/projected/e1047891-b091-4202-a4fc-38abe49ced8c-kube-api-access-2s5g4\") pod \"rabbitmq-cluster-operator-manager-668c99d594-tj9mg\" (UID: \"e1047891-b091-4202-a4fc-38abe49ced8c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.843351 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.853652 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.870208 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.870350 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: E1205 12:25:30.870432 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert podName:1b0cf425-4752-41b6-9a30-862e38015368 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:31.870415474 +0000 UTC m=+977.454737804 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" (UID: "1b0cf425-4752-41b6-9a30-862e38015368") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.870800 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" event={"ID":"76fdb364-88f3-4033-8318-353b66329f32","Type":"ContainerStarted","Data":"6083a7b833de74f456502ce7b0ae9519955be264b7f612e71b0bd3d1f06ed4f9"} Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.871883 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" event={"ID":"0ee16c2f-bb8c-495c-a007-41444751c118","Type":"ContainerStarted","Data":"ac9547b470b5b40e0a858396e383f09b357cd80c25797395af0eb007d4770cfc"} Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.872867 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" event={"ID":"f6b616b6-23f5-4671-8d91-cc11317f07a6","Type":"ContainerStarted","Data":"1645d0a6ab4d264197c5ec7272a550749e8e21dc2fee4f771f421823ec2935b3"} Dec 05 12:25:30 crc kubenswrapper[4711]: I1205 12:25:30.874442 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" event={"ID":"61d6cab9-9cd8-443b-ba0f-90de0670366b","Type":"ContainerStarted","Data":"ff2d22490c306718ac18e817ae175a16c3834f5267c55d5c1f36cfd802e0b838"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.174543 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.174778 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.174868 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.175036 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.175110 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:32.175091897 +0000 UTC m=+977.759414227 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "metrics-server-cert" not found Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.175171 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.175294 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:32.175273872 +0000 UTC m=+977.759596192 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "webhook-server-cert" not found Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.182006 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765"] Dec 05 12:25:31 crc kubenswrapper[4711]: W1205 12:25:31.183561 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7aee5f79_d3e2_4f7b_9047_d2ca4a048c00.slice/crio-b353d1e4991c17c27c5d5366c3e49e7b844658c05aa928c72d80f9d25cc487c6 WatchSource:0}: Error finding container b353d1e4991c17c27c5d5366c3e49e7b844658c05aa928c72d80f9d25cc487c6: Status 404 returned error can't find the container with id b353d1e4991c17c27c5d5366c3e49e7b844658c05aa928c72d80f9d25cc487c6 Dec 05 12:25:31 crc kubenswrapper[4711]: W1205 12:25:31.188833 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd9c2a02_45a9_444e_b026_18f6f632d157.slice/crio-17f94a3478d914f7f58f65a7a7296e18ddcce5927bfe4fba41de5822c94ddf2f WatchSource:0}: Error finding container 17f94a3478d914f7f58f65a7a7296e18ddcce5927bfe4fba41de5822c94ddf2f: Status 404 returned error can't find the container with id 17f94a3478d914f7f58f65a7a7296e18ddcce5927bfe4fba41de5822c94ddf2f Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.192083 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb"] Dec 05 12:25:31 crc kubenswrapper[4711]: W1205 12:25:31.195041 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1647c9ea_6d82_4f30_9641_25f10d54fbc6.slice/crio-cc965c43e4e2a7d066c8f8f2c8b2e4b442826ccda563bb1bd1ae8285957666a1 WatchSource:0}: Error finding container cc965c43e4e2a7d066c8f8f2c8b2e4b442826ccda563bb1bd1ae8285957666a1: Status 404 returned error can't find the container with id cc965c43e4e2a7d066c8f8f2c8b2e4b442826ccda563bb1bd1ae8285957666a1 Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.245463 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l"] Dec 05 12:25:31 crc kubenswrapper[4711]: W1205 12:25:31.247590 4711 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81f3007f_2841_4b35_b36d_7527ad69da4f.slice/crio-8b09bd0bc82b800548fa19d88be19f328f7ceb0e099381f65626b3d234f267db WatchSource:0}: Error finding container 8b09bd0bc82b800548fa19d88be19f328f7ceb0e099381f65626b3d234f267db: Status 404 returned error can't find the container with id 8b09bd0bc82b800548fa19d88be19f328f7ceb0e099381f65626b3d234f267db Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.255985 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv"] Dec 05 12:25:31 crc kubenswrapper[4711]: W1205 12:25:31.256903 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd19ef58_a657_4733_bc5f_0917ea66ee3b.slice/crio-c85365547e29fefde06c302d0b3e396bddcfbce9798a82ee3115311d1e53042e WatchSource:0}: Error finding container c85365547e29fefde06c302d0b3e396bddcfbce9798a82ee3115311d1e53042e: Status 404 returned error can't find the container with id c85365547e29fefde06c302d0b3e396bddcfbce9798a82ee3115311d1e53042e Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.358905 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.371779 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.378166 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.378486 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.378637 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert podName:162ed24d-7f1a-43d3-a543-84a19891bcd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:33.378560316 +0000 UTC m=+978.962882706 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert") pod "infra-operator-controller-manager-57548d458d-8b94l" (UID: "162ed24d-7f1a-43d3-a543-84a19891bcd0") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:31 crc kubenswrapper[4711]: W1205 12:25:31.385697 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fbc7dde_da0b_48ab_9af3_0c023a9f446b.slice/crio-6c86249441846a23cad1163b12f03cbc576fa51d049cb9c4e3fd317120170264 WatchSource:0}: Error finding container 6c86249441846a23cad1163b12f03cbc576fa51d049cb9c4e3fd317120170264: Status 404 returned error can't find the container with id 6c86249441846a23cad1163b12f03cbc576fa51d049cb9c4e3fd317120170264 Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.390301 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf"] Dec 05 12:25:31 crc kubenswrapper[4711]: W1205 12:25:31.391712 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd036d428_1c3b_4a5c_b071_04c3270d8e0d.slice/crio-57bf7ace8de6d105fbb8c74a899867e9bbbe56360dfd090875da4794c84e22f7 WatchSource:0}: Error finding container 57bf7ace8de6d105fbb8c74a899867e9bbbe56360dfd090875da4794c84e22f7: Status 404 returned error can't find the container with id 57bf7ace8de6d105fbb8c74a899867e9bbbe56360dfd090875da4794c84e22f7 Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.397259 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.403245 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.606203 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.611169 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.618609 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.625046 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg"] Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.631444 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk"] Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.640075 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2s5g4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-tj9mg_openstack-operators(e1047891-b091-4202-a4fc-38abe49ced8c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.640277 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pjfwz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-vvnrf_openstack-operators(8bda4a64-08b2-4c48-b0c6-bfb094b7b985): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.643221 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" podUID="e1047891-b091-4202-a4fc-38abe49ced8c" Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.644381 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw"] Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.646060 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pjfwz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-vvnrf_openstack-operators(8bda4a64-08b2-4c48-b0c6-bfb094b7b985): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:25:31 crc 
kubenswrapper[4711]: E1205 12:25:31.647327 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" podUID="8bda4a64-08b2-4c48-b0c6-bfb094b7b985" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.679418 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.20:5001/openstack-k8s-operators/watcher-operator:d23b8876e1bcf18983498fca8ec9314bc8124a8c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fbgqr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6c9545865c-7z2lk_openstack-operators(85bb9f84-9769-4960-83a3-00f1a8fb9851): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.681466 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: 
{{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fbgqr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6c9545865c-7z2lk_openstack-operators(85bb9f84-9769-4960-83a3-00f1a8fb9851): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.681848 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ppxfx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
neutron-operator-controller-manager-5fdfd5b6b5-87csw_openstack-operators(c5aa315b-0c36-4fba-a6a0-69cc18d6f21f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.683167 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" podUID="85bb9f84-9769-4960-83a3-00f1a8fb9851" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.684327 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ppxfx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-87csw_openstack-operators(c5aa315b-0c36-4fba-a6a0-69cc18d6f21f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.686117 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" podUID="c5aa315b-0c36-4fba-a6a0-69cc18d6f21f" Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.901147 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.902136 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.902239 4711 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert podName:1b0cf425-4752-41b6-9a30-862e38015368 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:33.902209089 +0000 UTC m=+979.486531419 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" (UID: "1b0cf425-4752-41b6-9a30-862e38015368") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.914228 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" event={"ID":"bd9c2a02-45a9-444e-b026-18f6f632d157","Type":"ContainerStarted","Data":"17f94a3478d914f7f58f65a7a7296e18ddcce5927bfe4fba41de5822c94ddf2f"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.929053 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" event={"ID":"81f3007f-2841-4b35-b36d-7527ad69da4f","Type":"ContainerStarted","Data":"8b09bd0bc82b800548fa19d88be19f328f7ceb0e099381f65626b3d234f267db"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.933682 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" event={"ID":"52651fb6-d860-4d58-9084-8d2c8dc49529","Type":"ContainerStarted","Data":"89dfd8fb7be1b63c67523a9acad23e2b3d314f0d190f4ec1e1cebaaf22edd2b9"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.937740 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" event={"ID":"1651f201-08ba-4dea-88ff-680d91d475d3","Type":"ContainerStarted","Data":"a8a488105ef879b377d24c3647f5e6527f0b4a701557f521a4f5241afc7723f3"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.940176 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" event={"ID":"fd19ef58-a657-4733-bc5f-0917ea66ee3b","Type":"ContainerStarted","Data":"c85365547e29fefde06c302d0b3e396bddcfbce9798a82ee3115311d1e53042e"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.941086 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" event={"ID":"d036d428-1c3b-4a5c-b071-04c3270d8e0d","Type":"ContainerStarted","Data":"57bf7ace8de6d105fbb8c74a899867e9bbbe56360dfd090875da4794c84e22f7"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.942756 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" event={"ID":"2fbc7dde-da0b-48ab-9af3-0c023a9f446b","Type":"ContainerStarted","Data":"6c86249441846a23cad1163b12f03cbc576fa51d049cb9c4e3fd317120170264"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.944931 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" event={"ID":"7aee5f79-d3e2-4f7b-9047-d2ca4a048c00","Type":"ContainerStarted","Data":"b353d1e4991c17c27c5d5366c3e49e7b844658c05aa928c72d80f9d25cc487c6"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.950270 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" 
event={"ID":"8bda4a64-08b2-4c48-b0c6-bfb094b7b985","Type":"ContainerStarted","Data":"3d055dd8f0bfad0745822f1099400d1e517a52e21192b73a6221b2073d5708d7"} Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.954757 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" podUID="8bda4a64-08b2-4c48-b0c6-bfb094b7b985" Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.954827 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" event={"ID":"c900de25-1ca3-4a0f-8485-c0e7d1b05f12","Type":"ContainerStarted","Data":"7c50dc1e40d370ea316bcf486e013b2d477e7a80383dd4012158465a944fd587"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.957974 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" event={"ID":"85bb9f84-9769-4960-83a3-00f1a8fb9851","Type":"ContainerStarted","Data":"9a3ddadbd3038191feb0f0221632756016a1ab4a68e628f97b38e5ddc6cdd739"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.962700 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" event={"ID":"aef68c5a-5fdd-47dd-8205-8d39019e124e","Type":"ContainerStarted","Data":"8dde792c028036f13056c730b8a2a7b64aaeea8aefc322af6881aca9581afcd7"} Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.964265 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/openstack-k8s-operators/watcher-operator:d23b8876e1bcf18983498fca8ec9314bc8124a8c\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" podUID="85bb9f84-9769-4960-83a3-00f1a8fb9851" Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.964827 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" event={"ID":"1647c9ea-6d82-4f30-9641-25f10d54fbc6","Type":"ContainerStarted","Data":"cc965c43e4e2a7d066c8f8f2c8b2e4b442826ccda563bb1bd1ae8285957666a1"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.967918 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" event={"ID":"50fdc7e4-c9d8-487f-827b-5e087aebdcb0","Type":"ContainerStarted","Data":"770931f656ffe7d0d864fe132c8d410eeabb3e167f87a3ef7471e2bc141f0008"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 12:25:31.970797 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" event={"ID":"e1047891-b091-4202-a4fc-38abe49ced8c","Type":"ContainerStarted","Data":"ab1481f5977eb267593c896107719bcee3f20a145c2587992215c0ab5d20e4d8"} Dec 05 12:25:31 crc kubenswrapper[4711]: I1205 
12:25:31.974509 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" event={"ID":"c5aa315b-0c36-4fba-a6a0-69cc18d6f21f","Type":"ContainerStarted","Data":"56531a04220256e9664d076fb86207763807e331bd77bc582af30f0947108d6f"} Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.975491 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" podUID="e1047891-b091-4202-a4fc-38abe49ced8c" Dec 05 12:25:31 crc kubenswrapper[4711]: E1205 12:25:31.984397 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" podUID="c5aa315b-0c36-4fba-a6a0-69cc18d6f21f" Dec 05 12:25:32 crc kubenswrapper[4711]: I1205 12:25:32.207352 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:32 crc kubenswrapper[4711]: I1205 12:25:32.207441 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:32 crc kubenswrapper[4711]: E1205 12:25:32.207941 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 12:25:32 crc kubenswrapper[4711]: E1205 12:25:32.208022 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:34.2080045 +0000 UTC m=+979.792326830 (durationBeforeRetry 2s). 
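[editor's note] The ErrImagePull: "pull QPS exceeded" entries at 12:25:31 above are produced by the kubelet itself, not by the registry: image pulls are throttled client-side by the registryPullQPS/registryBurst settings in the KubeletConfiguration (upstream defaults are 5 QPS with a burst of 10), so when a dozen operator deployments schedule at once the excess pulls fail immediately and fall back to the sync-loop retry. A minimal sketch of that token-bucket behavior, assuming the default limits, using golang.org/x/time/rate:

package main

import (
	"fmt"

	"golang.org/x/time/rate"
)

// Sketch of the kubelet's image-pull throttle: a token bucket refilled at
// registryPullQPS tokens/sec with capacity registryBurst (assumed here to be
// the upstream defaults, 5 and 10). Pulls that find no token available fail
// immediately with "pull QPS exceeded" instead of queueing.
func main() {
	limiter := rate.NewLimiter(rate.Limit(5), 10) // 5 QPS, burst 10

	for i := 0; i < 15; i++ {
		if limiter.Allow() {
			fmt.Printf("pull %2d: started\n", i)
		} else {
			// This is the branch the log entries above correspond to.
			fmt.Printf("pull %2d: ErrImagePull: pull QPS exceeded\n", i)
		}
	}
}

Running this, the first ten pulls consume the burst and the remaining five are rejected, which matches the pattern above: several operator images pull fine while the stragglers fail in the same second and move to backoff.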
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "webhook-server-cert" not found Dec 05 12:25:32 crc kubenswrapper[4711]: E1205 12:25:32.210027 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 12:25:32 crc kubenswrapper[4711]: E1205 12:25:32.210104 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:34.210069871 +0000 UTC m=+979.794392201 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "metrics-server-cert" not found Dec 05 12:25:32 crc kubenswrapper[4711]: E1205 12:25:32.995375 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" podUID="e1047891-b091-4202-a4fc-38abe49ced8c" Dec 05 12:25:32 crc kubenswrapper[4711]: E1205 12:25:32.996673 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/openstack-k8s-operators/watcher-operator:d23b8876e1bcf18983498fca8ec9314bc8124a8c\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" podUID="85bb9f84-9769-4960-83a3-00f1a8fb9851" Dec 05 12:25:32 crc kubenswrapper[4711]: E1205 12:25:32.996853 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" podUID="c5aa315b-0c36-4fba-a6a0-69cc18d6f21f" Dec 05 12:25:32 crc kubenswrapper[4711]: E1205 12:25:32.996894 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" podUID="8bda4a64-08b2-4c48-b0c6-bfb094b7b985" Dec 05 12:25:33 crc 
kubenswrapper[4711]: I1205 12:25:33.448724 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:33 crc kubenswrapper[4711]: E1205 12:25:33.448905 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:33 crc kubenswrapper[4711]: E1205 12:25:33.448953 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert podName:162ed24d-7f1a-43d3-a543-84a19891bcd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:37.448938493 +0000 UTC m=+983.033260823 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert") pod "infra-operator-controller-manager-57548d458d-8b94l" (UID: "162ed24d-7f1a-43d3-a543-84a19891bcd0") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:33 crc kubenswrapper[4711]: I1205 12:25:33.956104 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:33 crc kubenswrapper[4711]: E1205 12:25:33.956492 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:33 crc kubenswrapper[4711]: E1205 12:25:33.956575 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert podName:1b0cf425-4752-41b6-9a30-862e38015368 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:37.956555101 +0000 UTC m=+983.540877431 (durationBeforeRetry 4s). 
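[editor's note] The MountVolume.SetUp failures here are a missing-dependency condition rather than a mount bug: each pod references a TLS Secret (webhook-server-cert, metrics-server-cert, infra-operator-webhook-server-cert, openstack-baremetal-operator-webhook-server-cert) that has not been created yet, presumably because the webhook certificates are still being issued, and the kubelet simply retries until the Secret exists (the mounts do succeed at 12:25:46 below). A hedged client-go sketch that polls for one of these Secrets; the kubeconfig path and poll interval are assumptions, the names come from the log:

package main

import (
	"context"
	"fmt"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a client from the local kubeconfig; the path is an assumption.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// Poll until the Secret the kubelet is waiting on exists.
	for {
		_, err := cs.CoreV1().Secrets("openstack-operators").
			Get(context.TODO(), "webhook-server-cert", metav1.GetOptions{})
		if err == nil {
			fmt.Println("secret present; the cert volume can now mount")
			return
		}
		fmt.Println("still missing:", err)
		time.Sleep(2 * time.Second)
	}
}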
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" (UID: "1b0cf425-4752-41b6-9a30-862e38015368") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:34 crc kubenswrapper[4711]: I1205 12:25:34.270720 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:34 crc kubenswrapper[4711]: I1205 12:25:34.270909 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:34 crc kubenswrapper[4711]: E1205 12:25:34.271110 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 12:25:34 crc kubenswrapper[4711]: E1205 12:25:34.271170 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:38.27115236 +0000 UTC m=+983.855474690 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "metrics-server-cert" not found Dec 05 12:25:34 crc kubenswrapper[4711]: E1205 12:25:34.271577 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 12:25:34 crc kubenswrapper[4711]: E1205 12:25:34.271610 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:38.271599401 +0000 UTC m=+983.855921731 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "webhook-server-cert" not found Dec 05 12:25:37 crc kubenswrapper[4711]: I1205 12:25:37.522293 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:37 crc kubenswrapper[4711]: E1205 12:25:37.523050 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:37 crc kubenswrapper[4711]: E1205 12:25:37.523095 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert podName:162ed24d-7f1a-43d3-a543-84a19891bcd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:45.523081495 +0000 UTC m=+991.107403825 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert") pod "infra-operator-controller-manager-57548d458d-8b94l" (UID: "162ed24d-7f1a-43d3-a543-84a19891bcd0") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:38 crc kubenswrapper[4711]: I1205 12:25:38.030581 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:38 crc kubenswrapper[4711]: E1205 12:25:38.030721 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:38 crc kubenswrapper[4711]: E1205 12:25:38.030775 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert podName:1b0cf425-4752-41b6-9a30-862e38015368 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.030758446 +0000 UTC m=+991.615080776 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" (UID: "1b0cf425-4752-41b6-9a30-862e38015368") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:25:38 crc kubenswrapper[4711]: I1205 12:25:38.334477 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:38 crc kubenswrapper[4711]: I1205 12:25:38.334567 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:38 crc kubenswrapper[4711]: E1205 12:25:38.334671 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 12:25:38 crc kubenswrapper[4711]: E1205 12:25:38.334730 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 12:25:38 crc kubenswrapper[4711]: E1205 12:25:38.334785 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.334758672 +0000 UTC m=+991.919081032 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "webhook-server-cert" not found Dec 05 12:25:38 crc kubenswrapper[4711]: E1205 12:25:38.334813 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs podName:09a6ffec-d739-4b72-8b6b-83609ed4f571 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.334800683 +0000 UTC m=+991.919123043 (durationBeforeRetry 8s). 
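[editor's note] Note the durationBeforeRetry values climbing 2s, 4s, 8s across these attempts (and 16s at 12:25:45 below): nestedpendingoperations applies a per-volume exponential backoff, doubling the delay after each failure up to a cap. A minimal sketch of that doubling policy; the 2s seed is read off the log, and the cap is an assumption rather than the kubelet's exact constant:

package main

import (
	"fmt"
	"time"
)

// Doubling backoff as seen in the log: 2s, 4s, 8s, 16s, ... up to a cap.
type backoff struct {
	next, max time.Duration
}

func (b *backoff) fail() time.Duration {
	d := b.next
	b.next *= 2
	if b.next > b.max {
		b.next = b.max
	}
	return d
}

func main() {
	b := &backoff{next: 2 * time.Second, max: 2 * time.Minute} // cap assumed
	for i := 1; i <= 5; i++ {
		fmt.Printf("attempt %d failed: durationBeforeRetry %s\n", i, b.fail())
	}
}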
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs") pod "openstack-operator-controller-manager-fcb8bd8db-wg5k4" (UID: "09a6ffec-d739-4b72-8b6b-83609ed4f571") : secret "metrics-server-cert" not found Dec 05 12:25:45 crc kubenswrapper[4711]: E1205 12:25:45.403831 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d" Dec 05 12:25:45 crc kubenswrapper[4711]: E1205 12:25:45.404347 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tmhck,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-c989k_openstack-operators(1651f201-08ba-4dea-88ff-680d91d475d3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:45 crc kubenswrapper[4711]: I1205 12:25:45.567486 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:25:45 crc kubenswrapper[4711]: E1205 12:25:45.567772 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:45 crc kubenswrapper[4711]: E1205 12:25:45.567858 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert podName:162ed24d-7f1a-43d3-a543-84a19891bcd0 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:01.567839058 +0000 UTC m=+1007.152161388 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert") pod "infra-operator-controller-manager-57548d458d-8b94l" (UID: "162ed24d-7f1a-43d3-a543-84a19891bcd0") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:25:46 crc kubenswrapper[4711]: E1205 12:25:46.041259 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85" Dec 05 12:25:46 crc kubenswrapper[4711]: E1205 12:25:46.041562 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5hvld,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-jxmwb_openstack-operators(bd9c2a02-45a9-444e-b026-18f6f632d157): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:46 crc kubenswrapper[4711]: I1205 12:25:46.074904 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:46 crc kubenswrapper[4711]: I1205 12:25:46.082769 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1b0cf425-4752-41b6-9a30-862e38015368-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m\" (UID: \"1b0cf425-4752-41b6-9a30-862e38015368\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:46 crc kubenswrapper[4711]: I1205 12:25:46.267179 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:25:46 crc kubenswrapper[4711]: I1205 12:25:46.379769 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:46 crc kubenswrapper[4711]: I1205 12:25:46.379857 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:46 crc kubenswrapper[4711]: I1205 12:25:46.384115 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-metrics-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:46 crc kubenswrapper[4711]: I1205 12:25:46.386179 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/09a6ffec-d739-4b72-8b6b-83609ed4f571-webhook-certs\") pod \"openstack-operator-controller-manager-fcb8bd8db-wg5k4\" (UID: \"09a6ffec-d739-4b72-8b6b-83609ed4f571\") " pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:46 crc kubenswrapper[4711]: I1205 12:25:46.536114 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:46 crc kubenswrapper[4711]: E1205 12:25:46.552626 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 05 12:25:46 crc kubenswrapper[4711]: E1205 12:25:46.552808 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7hbp4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-sjdwh_openstack-operators(50fdc7e4-c9d8-487f-827b-5e087aebdcb0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:47 crc kubenswrapper[4711]: E1205 12:25:47.080486 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9" Dec 05 12:25:47 crc kubenswrapper[4711]: E1205 12:25:47.081228 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kj8vz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-b2tj4_openstack-operators(2fbc7dde-da0b-48ab-9af3-0c023a9f446b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:47 crc kubenswrapper[4711]: E1205 12:25:47.597730 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 05 12:25:47 crc kubenswrapper[4711]: E1205 12:25:47.597898 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 
-3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ghwfj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-wl765_openstack-operators(1647c9ea-6d82-4f30-9641-25f10d54fbc6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:48 crc kubenswrapper[4711]: E1205 12:25:48.165917 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 05 12:25:48 crc kubenswrapper[4711]: E1205 12:25:48.166144 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q77kc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-mmwc9_openstack-operators(c900de25-1ca3-4a0f-8485-c0e7d1b05f12): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:48 crc kubenswrapper[4711]: E1205 12:25:48.690174 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809" Dec 05 12:25:48 crc kubenswrapper[4711]: E1205 12:25:48.690345 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-88chz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-z9pxv_openstack-operators(81f3007f-2841-4b35-b36d-7527ad69da4f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:49 crc kubenswrapper[4711]: E1205 12:25:49.132730 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 12:25:49 crc kubenswrapper[4711]: E1205 12:25:49.132936 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tnzkd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-fs4fj_openstack-operators(aef68c5a-5fdd-47dd-8205-8d39019e124e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:49 crc kubenswrapper[4711]: E1205 12:25:49.666577 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 05 12:25:49 crc kubenswrapper[4711]: E1205 12:25:49.666975 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w972m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-fs7kh_openstack-operators(7aee5f79-d3e2-4f7b-9047-d2ca4a048c00): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:25:52 crc kubenswrapper[4711]: I1205 12:25:52.952621 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m"] Dec 05 12:25:53 crc kubenswrapper[4711]: W1205 12:25:53.043071 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b0cf425_4752_41b6_9a30_862e38015368.slice/crio-77782d9345b914ce29434eb20b37d9f9fd27f68552ed6377544823709725da21 WatchSource:0}: Error finding container 77782d9345b914ce29434eb20b37d9f9fd27f68552ed6377544823709725da21: Status 404 returned error can't find the container with id 77782d9345b914ce29434eb20b37d9f9fd27f68552ed6377544823709725da21 Dec 05 12:25:53 crc kubenswrapper[4711]: I1205 12:25:53.116712 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4"] Dec 05 12:25:53 crc kubenswrapper[4711]: I1205 12:25:53.127778 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" event={"ID":"1b0cf425-4752-41b6-9a30-862e38015368","Type":"ContainerStarted","Data":"77782d9345b914ce29434eb20b37d9f9fd27f68552ed6377544823709725da21"} Dec 05 12:25:53 crc kubenswrapper[4711]: W1205 12:25:53.502415 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09a6ffec_d739_4b72_8b6b_83609ed4f571.slice/crio-a1fdc1dcf066ff9c9e2e7634cf8fdbf305be2c491142c73c2b09864a44e55578 WatchSource:0}: Error finding container a1fdc1dcf066ff9c9e2e7634cf8fdbf305be2c491142c73c2b09864a44e55578: Status 404 returned error can't find the container with id a1fdc1dcf066ff9c9e2e7634cf8fdbf305be2c491142c73c2b09864a44e55578 Dec 05 12:25:54 crc kubenswrapper[4711]: I1205 12:25:54.139799 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" event={"ID":"fd19ef58-a657-4733-bc5f-0917ea66ee3b","Type":"ContainerStarted","Data":"4e3bcc0835c35dd05bab7b7f71f322f8adb411763310816dd2098e1f46d3b3e1"} Dec 05 12:25:54 crc kubenswrapper[4711]: I1205 12:25:54.142431 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" 
event={"ID":"f6b616b6-23f5-4671-8d91-cc11317f07a6","Type":"ContainerStarted","Data":"ccee23e8092e5bcfd4a1448dc35a8aacd7927d6e0d09a9f442c536e802911537"} Dec 05 12:25:54 crc kubenswrapper[4711]: I1205 12:25:54.145866 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" event={"ID":"d036d428-1c3b-4a5c-b071-04c3270d8e0d","Type":"ContainerStarted","Data":"5a2436fd398d518b86ea3cf7640b60d8dab93fa921cc108732941a19cde4ba9e"} Dec 05 12:25:54 crc kubenswrapper[4711]: I1205 12:25:54.158652 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" event={"ID":"61d6cab9-9cd8-443b-ba0f-90de0670366b","Type":"ContainerStarted","Data":"57108f044d9ad98fba8b4486f0e18130808b3e6a6303d94a8270231542b4d213"} Dec 05 12:25:54 crc kubenswrapper[4711]: I1205 12:25:54.160047 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" event={"ID":"09a6ffec-d739-4b72-8b6b-83609ed4f571","Type":"ContainerStarted","Data":"a1fdc1dcf066ff9c9e2e7634cf8fdbf305be2c491142c73c2b09864a44e55578"} Dec 05 12:25:54 crc kubenswrapper[4711]: I1205 12:25:54.165644 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" event={"ID":"76fdb364-88f3-4033-8318-353b66329f32","Type":"ContainerStarted","Data":"c3a73445c5ce18af50baa76e7ae7c25bf669b90b2fe6eb95f72b46a826d4305e"} Dec 05 12:25:54 crc kubenswrapper[4711]: I1205 12:25:54.166813 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" event={"ID":"52651fb6-d860-4d58-9084-8d2c8dc49529","Type":"ContainerStarted","Data":"7b04003c8e29086937a362c451d633a2e5ec63a1a12af47890e34e1d3c1aea85"} Dec 05 12:25:54 crc kubenswrapper[4711]: I1205 12:25:54.168883 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" event={"ID":"0ee16c2f-bb8c-495c-a007-41444751c118","Type":"ContainerStarted","Data":"a24d0567e07837a5da0e9e6ad4e4128fe7f4dc7d7d61d41befd9759e23e0d5f4"} Dec 05 12:25:56 crc kubenswrapper[4711]: I1205 12:25:56.182044 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" event={"ID":"c5aa315b-0c36-4fba-a6a0-69cc18d6f21f","Type":"ContainerStarted","Data":"2d9d8837e6506058361ac8d8dc6b376f5e2bffa4d8e81e0923ee8b36107d9fe5"} Dec 05 12:25:57 crc kubenswrapper[4711]: I1205 12:25:57.200178 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" event={"ID":"e1047891-b091-4202-a4fc-38abe49ced8c","Type":"ContainerStarted","Data":"6b764eb5b4fc73f25b663e3a147b8f5110e4759b66514b347f2fdd192197aeea"} Dec 05 12:25:57 crc kubenswrapper[4711]: I1205 12:25:57.202756 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" event={"ID":"09a6ffec-d739-4b72-8b6b-83609ed4f571","Type":"ContainerStarted","Data":"27664a3824496a5435513ea024b02aa1918dbb8cfbc72bb8251733120f67405e"} Dec 05 12:25:57 crc kubenswrapper[4711]: I1205 12:25:57.203103 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:25:57 crc kubenswrapper[4711]: I1205 
12:25:57.252211 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-tj9mg" podStartSLOduration=6.021102905 podStartE2EDuration="27.252193076s" podCreationTimestamp="2025-12-05 12:25:30 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.63990382 +0000 UTC m=+977.224226150" lastFinishedPulling="2025-12-05 12:25:52.870993991 +0000 UTC m=+998.455316321" observedRunningTime="2025-12-05 12:25:57.221704724 +0000 UTC m=+1002.806027074" watchObservedRunningTime="2025-12-05 12:25:57.252193076 +0000 UTC m=+1002.836515406" Dec 05 12:25:57 crc kubenswrapper[4711]: I1205 12:25:57.253936 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" podStartSLOduration=27.253926168 podStartE2EDuration="27.253926168s" podCreationTimestamp="2025-12-05 12:25:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:25:57.24789993 +0000 UTC m=+1002.832222250" watchObservedRunningTime="2025-12-05 12:25:57.253926168 +0000 UTC m=+1002.838248498" Dec 05 12:25:58 crc kubenswrapper[4711]: I1205 12:25:58.211646 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" event={"ID":"8bda4a64-08b2-4c48-b0c6-bfb094b7b985","Type":"ContainerStarted","Data":"ae384bfad44c9aa90c2004546f33e3f08b55891977be7c84811424c92954fa0d"} Dec 05 12:25:59 crc kubenswrapper[4711]: I1205 12:25:59.224692 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" event={"ID":"85bb9f84-9769-4960-83a3-00f1a8fb9851","Type":"ContainerStarted","Data":"6ad612fe6b93f708c51f9fd37e6f85b86369675abecde42fe5ea0d8abda07e58"} Dec 05 12:25:59 crc kubenswrapper[4711]: E1205 12:25:59.503638 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" podUID="aef68c5a-5fdd-47dd-8205-8d39019e124e" Dec 05 12:25:59 crc kubenswrapper[4711]: E1205 12:25:59.516808 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" podUID="bd9c2a02-45a9-444e-b026-18f6f632d157" Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.241611 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" event={"ID":"1b0cf425-4752-41b6-9a30-862e38015368","Type":"ContainerStarted","Data":"7d095173ffbd16be1a19ac576f45d6dbf9fe9610500b1de0683c78b10d7f25db"} Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.246022 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" event={"ID":"bd9c2a02-45a9-444e-b026-18f6f632d157","Type":"ContainerStarted","Data":"a5939f530cd41c0201475361ac83189ac0008fec95a4b48b78a0079c5289da89"} Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.251399 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" event={"ID":"aef68c5a-5fdd-47dd-8205-8d39019e124e","Type":"ContainerStarted","Data":"9bf52165f6741c96d5df86a618cd8a1e3f39d0253d4fdc53ee3b3f3d3144a2ed"} Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.256817 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" event={"ID":"85bb9f84-9769-4960-83a3-00f1a8fb9851","Type":"ContainerStarted","Data":"cb94aec871307d1e35e4368a9b2057851d986f9f8935a719e99a4f393bf594b3"} Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.256994 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.260331 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" event={"ID":"61d6cab9-9cd8-443b-ba0f-90de0670366b","Type":"ContainerStarted","Data":"3f823360a6c60262cfbb308e064a80039e7b3823e9be073fe23912b85438eb9c"} Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.261310 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.266988 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.310816 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" podStartSLOduration=2.835686708 podStartE2EDuration="31.310789644s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:30.755175332 +0000 UTC m=+976.339497662" lastFinishedPulling="2025-12-05 12:25:59.230278268 +0000 UTC m=+1004.814600598" observedRunningTime="2025-12-05 12:26:00.29723233 +0000 UTC m=+1005.881554670" watchObservedRunningTime="2025-12-05 12:26:00.310789644 +0000 UTC m=+1005.895111974" Dec 05 12:26:00 crc kubenswrapper[4711]: E1205 12:26:00.366367 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" podUID="81f3007f-2841-4b35-b36d-7527ad69da4f" Dec 05 12:26:00 crc kubenswrapper[4711]: I1205 12:26:00.392359 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" podStartSLOduration=9.579597771 podStartE2EDuration="31.392318695s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.679278871 +0000 UTC m=+977.263601191" lastFinishedPulling="2025-12-05 12:25:53.491999785 +0000 UTC m=+999.076322115" observedRunningTime="2025-12-05 12:26:00.376792322 +0000 UTC m=+1005.961114652" watchObservedRunningTime="2025-12-05 12:26:00.392318695 +0000 UTC m=+1005.976641025" Dec 05 12:26:00 crc kubenswrapper[4711]: E1205 12:26:00.458501 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" podUID="1651f201-08ba-4dea-88ff-680d91d475d3" Dec 05 12:26:00 crc kubenswrapper[4711]: E1205 12:26:00.458575 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" podUID="2fbc7dde-da0b-48ab-9af3-0c023a9f446b" Dec 05 12:26:00 crc kubenswrapper[4711]: E1205 12:26:00.463960 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" podUID="c900de25-1ca3-4a0f-8485-c0e7d1b05f12" Dec 05 12:26:00 crc kubenswrapper[4711]: E1205 12:26:00.465880 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" podUID="7aee5f79-d3e2-4f7b-9047-d2ca4a048c00" Dec 05 12:26:00 crc kubenswrapper[4711]: E1205 12:26:00.679466 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" podUID="50fdc7e4-c9d8-487f-827b-5e087aebdcb0" Dec 05 12:26:00 crc kubenswrapper[4711]: E1205 12:26:00.936157 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" podUID="1647c9ea-6d82-4f30-9641-25f10d54fbc6" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.269567 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" event={"ID":"1651f201-08ba-4dea-88ff-680d91d475d3","Type":"ContainerStarted","Data":"3ec6eb06d9904f235e45435f6290ea46f059fe7c50b14a0a848ad1c26dfef02e"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.273144 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" event={"ID":"c5aa315b-0c36-4fba-a6a0-69cc18d6f21f","Type":"ContainerStarted","Data":"9d45d2b110291e533b6118389d0caf271201b6f85fce30ecda5a7b2bd9993190"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.273655 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.276121 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" event={"ID":"aef68c5a-5fdd-47dd-8205-8d39019e124e","Type":"ContainerStarted","Data":"5d3445f09849e49784babce9e75d4f7d62763afae2f01d3317f7bdc33e284dd3"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.276543 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" Dec 05 12:26:01 crc 
kubenswrapper[4711]: I1205 12:26:01.276653 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.279526 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" event={"ID":"2fbc7dde-da0b-48ab-9af3-0c023a9f446b","Type":"ContainerStarted","Data":"e9f89d9b9e3ce6f3c0e1c8191fb65beb1aa128456e71a8d9b771903c701f8268"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.284441 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" event={"ID":"1b0cf425-4752-41b6-9a30-862e38015368","Type":"ContainerStarted","Data":"b7b3e6f7b6a6045569f8dcf7cff521004c1bda715df2ce2e8797f3032d423ce4"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.284571 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.286903 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" event={"ID":"f6b616b6-23f5-4671-8d91-cc11317f07a6","Type":"ContainerStarted","Data":"f3071e60f2130dd0389e2e1191c2c3870e05747901607a4131bb4d1c627cc068"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.287865 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.289907 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.291338 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" event={"ID":"8bda4a64-08b2-4c48-b0c6-bfb094b7b985","Type":"ContainerStarted","Data":"381729354e8bac30a238d63e77426b0c3fc18fdf3500397457f72af85a57dc60"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.292056 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.298671 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" event={"ID":"0ee16c2f-bb8c-495c-a007-41444751c118","Type":"ContainerStarted","Data":"bbfe7235ed6828276301334cae03008f11f18dbb2ab9c4707ef9a02dd4965044"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.298911 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.301484 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.302045 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" 
event={"ID":"d036d428-1c3b-4a5c-b071-04c3270d8e0d","Type":"ContainerStarted","Data":"adcca62e101895e786a438b4588ade1889b54c583ede93b94a66b782acf4ed9a"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.302702 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.304629 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.305304 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" event={"ID":"bd9c2a02-45a9-444e-b026-18f6f632d157","Type":"ContainerStarted","Data":"300d09c00a600ed5cf0787f714341a268784ba02ce66380e94d42b8db07440a2"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.305377 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.310002 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" event={"ID":"50fdc7e4-c9d8-487f-827b-5e087aebdcb0","Type":"ContainerStarted","Data":"07e43fe7ce69f2585b688564d49f73d4fa5eb1f3816a99159cbb5d9e58252abc"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.312842 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" event={"ID":"7aee5f79-d3e2-4f7b-9047-d2ca4a048c00","Type":"ContainerStarted","Data":"a476c0e743e415520a7fb18b447a90d4b8f3467db3a54b35789c42425ceb474a"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.318110 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" event={"ID":"c900de25-1ca3-4a0f-8485-c0e7d1b05f12","Type":"ContainerStarted","Data":"62667c5203720894d015b4e668b37288671884aeffdd2522d6f5027895fb7fc0"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.324789 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" podStartSLOduration=2.762599576 podStartE2EDuration="32.32477291s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.366983369 +0000 UTC m=+976.951305699" lastFinishedPulling="2025-12-05 12:26:00.929156703 +0000 UTC m=+1006.513479033" observedRunningTime="2025-12-05 12:26:01.323401826 +0000 UTC m=+1006.907724176" watchObservedRunningTime="2025-12-05 12:26:01.32477291 +0000 UTC m=+1006.909095240" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.342808 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" event={"ID":"fd19ef58-a657-4733-bc5f-0917ea66ee3b","Type":"ContainerStarted","Data":"bf01374f32a6f0ff6f9b45115cf28c58ea60e832930e34a365cae1fa8fa9692d"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.343567 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.346811 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.361201 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" podStartSLOduration=3.428476757 podStartE2EDuration="32.361182888s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.640184297 +0000 UTC m=+977.224506627" lastFinishedPulling="2025-12-05 12:26:00.572890428 +0000 UTC m=+1006.157212758" observedRunningTime="2025-12-05 12:26:01.347109091 +0000 UTC m=+1006.931431421" watchObservedRunningTime="2025-12-05 12:26:01.361182888 +0000 UTC m=+1006.945505218" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.371979 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" event={"ID":"81f3007f-2841-4b35-b36d-7527ad69da4f","Type":"ContainerStarted","Data":"38dfb37bcf104f372ddc89b86a4fc8c39858ea9507248c9629d7e6abd944cd3e"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.381968 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" event={"ID":"1647c9ea-6d82-4f30-9641-25f10d54fbc6","Type":"ContainerStarted","Data":"4ed20956a0b03e9d2ec6864ac955e7b77a6c43a26224d2c9910aa01ff2b6aed2"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.395347 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" event={"ID":"76fdb364-88f3-4033-8318-353b66329f32","Type":"ContainerStarted","Data":"8be2a85cbfd724ee6c530354a67cd659cfe2e262d71482498f5045f8b78f0a36"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.396274 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.397974 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.413734 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" podStartSLOduration=26.23952327 podStartE2EDuration="32.413721773s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:53.046352085 +0000 UTC m=+998.630674415" lastFinishedPulling="2025-12-05 12:25:59.220550548 +0000 UTC m=+1004.804872918" observedRunningTime="2025-12-05 12:26:01.411886458 +0000 UTC m=+1006.996208788" watchObservedRunningTime="2025-12-05 12:26:01.413721773 +0000 UTC m=+1006.998044103" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.427341 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" event={"ID":"52651fb6-d860-4d58-9084-8d2c8dc49529","Type":"ContainerStarted","Data":"59137bb590ca904fc02e01cf886b1ebd6e249432e2f6ce904495cf5f2f4ddc79"} Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.428614 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.434930 4711 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.480701 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" podStartSLOduration=2.764294428 podStartE2EDuration="32.480681985s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:30.305556564 +0000 UTC m=+975.889878894" lastFinishedPulling="2025-12-05 12:26:00.021944131 +0000 UTC m=+1005.606266451" observedRunningTime="2025-12-05 12:26:01.445710752 +0000 UTC m=+1007.030033092" watchObservedRunningTime="2025-12-05 12:26:01.480681985 +0000 UTC m=+1007.065004315" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.485328 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-87csw" podStartSLOduration=3.579242326 podStartE2EDuration="32.485309959s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.681749442 +0000 UTC m=+977.266071772" lastFinishedPulling="2025-12-05 12:26:00.587817075 +0000 UTC m=+1006.172139405" observedRunningTime="2025-12-05 12:26:01.479135557 +0000 UTC m=+1007.063457897" watchObservedRunningTime="2025-12-05 12:26:01.485309959 +0000 UTC m=+1007.069632289" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.586494 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" podStartSLOduration=2.895176655 podStartE2EDuration="32.586471113s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.190933549 +0000 UTC m=+976.775255869" lastFinishedPulling="2025-12-05 12:26:00.882227997 +0000 UTC m=+1006.466550327" observedRunningTime="2025-12-05 12:26:01.581079761 +0000 UTC m=+1007.165402101" watchObservedRunningTime="2025-12-05 12:26:01.586471113 +0000 UTC m=+1007.170793463" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.605941 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-gpstg" podStartSLOduration=3.209206549 podStartE2EDuration="32.605924253s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:30.577000408 +0000 UTC m=+976.161322738" lastFinishedPulling="2025-12-05 12:25:59.973718112 +0000 UTC m=+1005.558040442" observedRunningTime="2025-12-05 12:26:01.562119663 +0000 UTC m=+1007.146442003" watchObservedRunningTime="2025-12-05 12:26:01.605924253 +0000 UTC m=+1007.190246583" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.653992 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4vr8d" podStartSLOduration=3.291226214 podStartE2EDuration="32.653973749s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:30.609768447 +0000 UTC m=+976.194090777" lastFinishedPulling="2025-12-05 12:25:59.972515982 +0000 UTC m=+1005.556838312" observedRunningTime="2025-12-05 12:26:01.65241456 +0000 UTC m=+1007.236736900" watchObservedRunningTime="2025-12-05 12:26:01.653973749 +0000 UTC m=+1007.238296079" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.654053 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.679293 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/162ed24d-7f1a-43d3-a543-84a19891bcd0-cert\") pod \"infra-operator-controller-manager-57548d458d-8b94l\" (UID: \"162ed24d-7f1a-43d3-a543-84a19891bcd0\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.714579 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.723428 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nzkbm" podStartSLOduration=4.460125489 podStartE2EDuration="32.72340425s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.377273984 +0000 UTC m=+976.961596314" lastFinishedPulling="2025-12-05 12:25:59.640552745 +0000 UTC m=+1005.224875075" observedRunningTime="2025-12-05 12:26:01.700478595 +0000 UTC m=+1007.284800925" watchObservedRunningTime="2025-12-05 12:26:01.72340425 +0000 UTC m=+1007.307726600" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.733257 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-glz4l" podStartSLOduration=4.432271722 podStartE2EDuration="32.733237543s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.259759206 +0000 UTC m=+976.844081536" lastFinishedPulling="2025-12-05 12:25:59.560725037 +0000 UTC m=+1005.145047357" observedRunningTime="2025-12-05 12:26:01.725559654 +0000 UTC m=+1007.309881984" watchObservedRunningTime="2025-12-05 12:26:01.733237543 +0000 UTC m=+1007.317559873" Dec 05 12:26:01 crc kubenswrapper[4711]: I1205 12:26:01.759976 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-vd4nf" podStartSLOduration=3.582385462 podStartE2EDuration="32.759949921s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.395454722 +0000 UTC m=+976.979777042" lastFinishedPulling="2025-12-05 12:26:00.573019171 +0000 UTC m=+1006.157341501" observedRunningTime="2025-12-05 12:26:01.757468321 +0000 UTC m=+1007.341790661" watchObservedRunningTime="2025-12-05 12:26:01.759949921 +0000 UTC m=+1007.344272251" Dec 05 12:26:02 crc kubenswrapper[4711]: I1205 12:26:02.097894 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-8b94l"] Dec 05 12:26:02 crc kubenswrapper[4711]: W1205 12:26:02.138023 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod162ed24d_7f1a_43d3_a543_84a19891bcd0.slice/crio-0a74b7d786aae05d78c50e319ea1a27665a523c168134904664be3fbffcafde6 WatchSource:0}: Error finding container 0a74b7d786aae05d78c50e319ea1a27665a523c168134904664be3fbffcafde6: Status 404 returned error can't find the container with 
id 0a74b7d786aae05d78c50e319ea1a27665a523c168134904664be3fbffcafde6 Dec 05 12:26:02 crc kubenswrapper[4711]: I1205 12:26:02.438138 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" event={"ID":"162ed24d-7f1a-43d3-a543-84a19891bcd0","Type":"ContainerStarted","Data":"0a74b7d786aae05d78c50e319ea1a27665a523c168134904664be3fbffcafde6"} Dec 05 12:26:02 crc kubenswrapper[4711]: I1205 12:26:02.443688 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" event={"ID":"1651f201-08ba-4dea-88ff-680d91d475d3","Type":"ContainerStarted","Data":"871b906d82bfd9a580d19153148dbc7281625c43931c6b6af38730bfec66aeee"} Dec 05 12:26:02 crc kubenswrapper[4711]: I1205 12:26:02.446197 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" Dec 05 12:26:02 crc kubenswrapper[4711]: I1205 12:26:02.447221 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-vvnrf" Dec 05 12:26:02 crc kubenswrapper[4711]: I1205 12:26:02.463989 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" podStartSLOduration=3.129776861 podStartE2EDuration="33.463966123s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.369337298 +0000 UTC m=+976.953659638" lastFinishedPulling="2025-12-05 12:26:01.70352657 +0000 UTC m=+1007.287848900" observedRunningTime="2025-12-05 12:26:02.461470141 +0000 UTC m=+1008.045792481" watchObservedRunningTime="2025-12-05 12:26:02.463966123 +0000 UTC m=+1008.048288453" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.458079 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" event={"ID":"7aee5f79-d3e2-4f7b-9047-d2ca4a048c00","Type":"ContainerStarted","Data":"f25d0bc68ae51e048787c02bf76f54101aedfdb1425e4ea04be54053636e858d"} Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.458677 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.464208 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" event={"ID":"c900de25-1ca3-4a0f-8485-c0e7d1b05f12","Type":"ContainerStarted","Data":"6cc1cab5b633746096d1d69ce5b5d7535fdc460377d11d29d165f627c94c0c80"} Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.464368 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.470475 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" event={"ID":"81f3007f-2841-4b35-b36d-7527ad69da4f","Type":"ContainerStarted","Data":"99eed2c006c6b054a85df58e83cf4ad8fe79657d4a69f4f291539b1b520de567"} Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.470595 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" Dec 05 12:26:03 crc kubenswrapper[4711]: 
I1205 12:26:03.479239 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" event={"ID":"1647c9ea-6d82-4f30-9641-25f10d54fbc6","Type":"ContainerStarted","Data":"a25a02ab44760ca9cc8a66a3524e5bcd4e24dec879a688495a0e55a36c555424"} Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.479397 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.481650 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" event={"ID":"2fbc7dde-da0b-48ab-9af3-0c023a9f446b","Type":"ContainerStarted","Data":"ac3820843d5961940bef04cbf2f62ad102132543e3c1a0d26ea9b2ad0eed15b6"} Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.481777 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.482673 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" podStartSLOduration=3.512685433 podStartE2EDuration="34.482661726s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.185157145 +0000 UTC m=+976.769479475" lastFinishedPulling="2025-12-05 12:26:02.155133438 +0000 UTC m=+1007.739455768" observedRunningTime="2025-12-05 12:26:03.477116829 +0000 UTC m=+1009.061439169" watchObservedRunningTime="2025-12-05 12:26:03.482661726 +0000 UTC m=+1009.066984056" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.483252 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" event={"ID":"50fdc7e4-c9d8-487f-827b-5e087aebdcb0","Type":"ContainerStarted","Data":"52c0680f050e8ff652c78bbf5d11a45f32e2702a575ceddcc2ac1da826d1a306"} Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.484206 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.504595 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" podStartSLOduration=3.621902936 podStartE2EDuration="34.504575625s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.250976478 +0000 UTC m=+976.835298808" lastFinishedPulling="2025-12-05 12:26:02.133649167 +0000 UTC m=+1007.717971497" observedRunningTime="2025-12-05 12:26:03.499230804 +0000 UTC m=+1009.083553134" watchObservedRunningTime="2025-12-05 12:26:03.504575625 +0000 UTC m=+1009.088897955" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.526206 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" podStartSLOduration=4.011618468 podStartE2EDuration="34.526190479s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.639860759 +0000 UTC m=+977.224183089" lastFinishedPulling="2025-12-05 12:26:02.15443277 +0000 UTC m=+1007.738755100" observedRunningTime="2025-12-05 12:26:03.526039375 +0000 UTC m=+1009.110361705" 
watchObservedRunningTime="2025-12-05 12:26:03.526190479 +0000 UTC m=+1009.110512809" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.550759 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" podStartSLOduration=3.792385742 podStartE2EDuration="34.550739174s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.393305319 +0000 UTC m=+976.977627649" lastFinishedPulling="2025-12-05 12:26:02.151658751 +0000 UTC m=+1007.735981081" observedRunningTime="2025-12-05 12:26:03.546749916 +0000 UTC m=+1009.131072246" watchObservedRunningTime="2025-12-05 12:26:03.550739174 +0000 UTC m=+1009.135061504" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.572999 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" podStartSLOduration=4.053153032 podStartE2EDuration="34.572984963s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.639826518 +0000 UTC m=+977.224148838" lastFinishedPulling="2025-12-05 12:26:02.159658439 +0000 UTC m=+1007.743980769" observedRunningTime="2025-12-05 12:26:03.566325809 +0000 UTC m=+1009.150648139" watchObservedRunningTime="2025-12-05 12:26:03.572984963 +0000 UTC m=+1009.157307293" Dec 05 12:26:03 crc kubenswrapper[4711]: I1205 12:26:03.585924 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" podStartSLOduration=3.731816588 podStartE2EDuration="34.585908132s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:25:31.197743776 +0000 UTC m=+976.782066106" lastFinishedPulling="2025-12-05 12:26:02.05183532 +0000 UTC m=+1007.636157650" observedRunningTime="2025-12-05 12:26:03.581477232 +0000 UTC m=+1009.165799552" watchObservedRunningTime="2025-12-05 12:26:03.585908132 +0000 UTC m=+1009.170230462" Dec 05 12:26:06 crc kubenswrapper[4711]: I1205 12:26:06.273597 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m" Dec 05 12:26:06 crc kubenswrapper[4711]: I1205 12:26:06.542026 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-fcb8bd8db-wg5k4" Dec 05 12:26:09 crc kubenswrapper[4711]: I1205 12:26:09.869326 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wl765" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.041608 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-z9pxv" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.059266 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-jxmwb" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.120983 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-fs4fj" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.144943 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-b2tj4" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.237803 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fs7kh" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.252236 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-mmwc9" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.539898 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-sjdwh" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.636335 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c989k" Dec 05 12:26:10 crc kubenswrapper[4711]: I1205 12:26:10.846866 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6c9545865c-7z2lk" Dec 05 12:26:17 crc kubenswrapper[4711]: E1205 12:26:17.181895 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7" Dec 05 12:26:17 crc kubenswrapper[4711]: E1205 12:26:17.182602 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vw7v8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-57548d458d-8b94l_openstack-operators(162ed24d-7f1a-43d3-a543-84a19891bcd0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:26:17 crc kubenswrapper[4711]: E1205 12:26:17.363554 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" podUID="162ed24d-7f1a-43d3-a543-84a19891bcd0"
Dec 05 12:26:17 crc kubenswrapper[4711]: I1205 12:26:17.611888 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" event={"ID":"162ed24d-7f1a-43d3-a543-84a19891bcd0","Type":"ContainerStarted","Data":"7a2ba7f6d6a6f03c1c81c3a86056abef642961b0f7de0a60a5b4b2e9ba285083"}
Dec 05 12:26:17 crc kubenswrapper[4711]: E1205 12:26:17.613101 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7\\\"\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" podUID="162ed24d-7f1a-43d3-a543-84a19891bcd0"
Dec 05 12:26:18 crc kubenswrapper[4711]: E1205 12:26:18.623355 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7\\\"\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" podUID="162ed24d-7f1a-43d3-a543-84a19891bcd0"
Dec 05 12:26:32 crc kubenswrapper[4711]: I1205 12:26:32.741902 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" event={"ID":"162ed24d-7f1a-43d3-a543-84a19891bcd0","Type":"ContainerStarted","Data":"bc0945f0a7693902d59849b011165b002b66b3649d2ce9bcb012047187faf6be"}
Dec 05 12:26:32 crc kubenswrapper[4711]: I1205 12:26:32.742781 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l"
Dec 05 12:26:32 crc kubenswrapper[4711]: I1205 12:26:32.761923 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l" podStartSLOduration=33.569524136 podStartE2EDuration="1m3.761906498s" podCreationTimestamp="2025-12-05 12:25:29 +0000 UTC" firstStartedPulling="2025-12-05 12:26:02.154132573 +0000 UTC m=+1007.738454903" lastFinishedPulling="2025-12-05 12:26:32.346514925 +0000 UTC m=+1037.930837265" observedRunningTime="2025-12-05 12:26:32.760470283 +0000 UTC m=+1038.344792633" watchObservedRunningTime="2025-12-05 12:26:32.761906498 +0000 UTC m=+1038.346228828"
Dec 05 12:26:41 crc kubenswrapper[4711]: I1205 12:26:41.722334 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-8b94l"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.762261 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57b9d58665-d8xpl"]
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.768001 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57b9d58665-d8xpl"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.778057 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.778156 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.778211 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.778369 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-l8vls"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.787975 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57b9d58665-d8xpl"]
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.858201 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bb9bf987-q2zfm"]
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.859757 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.861821 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.873209 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bb9bf987-q2zfm"]
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.939438 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szdvb\" (UniqueName: \"kubernetes.io/projected/b706c014-e61e-4b7a-946f-8f9a05a31dd7-kube-api-access-szdvb\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.939600 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbztk\" (UniqueName: \"kubernetes.io/projected/34ca27ad-57c4-404a-b1f1-7590f648e9fa-kube-api-access-rbztk\") pod \"dnsmasq-dns-57b9d58665-d8xpl\" (UID: \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\") " pod="openstack/dnsmasq-dns-57b9d58665-d8xpl"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.939704 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-config\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.939767 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-dns-svc\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:26:59 crc kubenswrapper[4711]: I1205 12:26:59.939815 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ca27ad-57c4-404a-b1f1-7590f648e9fa-config\") pod \"dnsmasq-dns-57b9d58665-d8xpl\" (UID: \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\") " pod="openstack/dnsmasq-dns-57b9d58665-d8xpl"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.040934 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-config\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.041000 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-dns-svc\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.041023 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ca27ad-57c4-404a-b1f1-7590f648e9fa-config\") pod \"dnsmasq-dns-57b9d58665-d8xpl\" (UID: \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\") " pod="openstack/dnsmasq-dns-57b9d58665-d8xpl"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.041068 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szdvb\" (UniqueName: \"kubernetes.io/projected/b706c014-e61e-4b7a-946f-8f9a05a31dd7-kube-api-access-szdvb\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.041123 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbztk\" (UniqueName: \"kubernetes.io/projected/34ca27ad-57c4-404a-b1f1-7590f648e9fa-kube-api-access-rbztk\") pod \"dnsmasq-dns-57b9d58665-d8xpl\" (UID: \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\") " pod="openstack/dnsmasq-dns-57b9d58665-d8xpl"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.042110 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-config\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.042111 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-dns-svc\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.042174 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ca27ad-57c4-404a-b1f1-7590f648e9fa-config\") pod \"dnsmasq-dns-57b9d58665-d8xpl\" (UID: \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\") " pod="openstack/dnsmasq-dns-57b9d58665-d8xpl"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.067095 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbztk\" (UniqueName: \"kubernetes.io/projected/34ca27ad-57c4-404a-b1f1-7590f648e9fa-kube-api-access-rbztk\") pod \"dnsmasq-dns-57b9d58665-d8xpl\" (UID: \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\") " pod="openstack/dnsmasq-dns-57b9d58665-d8xpl"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.070174 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szdvb\" (UniqueName: \"kubernetes.io/projected/b706c014-e61e-4b7a-946f-8f9a05a31dd7-kube-api-access-szdvb\") pod \"dnsmasq-dns-7bb9bf987-q2zfm\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.090359 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57b9d58665-d8xpl"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.184247 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm"
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.638887 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57b9d58665-d8xpl"]
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.741142 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bb9bf987-q2zfm"]
Dec 05 12:27:00 crc kubenswrapper[4711]: I1205 12:27:00.980915 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm" event={"ID":"b706c014-e61e-4b7a-946f-8f9a05a31dd7","Type":"ContainerStarted","Data":"98247649b213931c81a241cbdebd015d011098412af85fca8043919b0e525769"}
Dec 05 12:27:01 crc kubenswrapper[4711]: I1205 12:27:01.024363 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57b9d58665-d8xpl" event={"ID":"34ca27ad-57c4-404a-b1f1-7590f648e9fa","Type":"ContainerStarted","Data":"ea455765d74effff0033a1a6b9f12f0e6d2e47bfd7c79576d03eed0d235e952e"}
Dec 05 12:27:03 crc kubenswrapper[4711]: I1205 12:27:03.759843 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57b9d58665-d8xpl"]
Dec 05 12:27:03 crc kubenswrapper[4711]: I1205 12:27:03.789805 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78d759bd75-hxcbk"]
Dec 05 12:27:03 crc kubenswrapper[4711]: I1205 12:27:03.790965 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:03 crc kubenswrapper[4711]: I1205 12:27:03.812364 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78d759bd75-hxcbk"]
Dec 05 12:27:03 crc kubenswrapper[4711]: I1205 12:27:03.926519 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fsmg\" (UniqueName: \"kubernetes.io/projected/941bc276-a319-4746-9fc2-be30d07cde1f-kube-api-access-2fsmg\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:03 crc kubenswrapper[4711]: I1205 12:27:03.926684 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-config\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:03 crc kubenswrapper[4711]: I1205 12:27:03.927054 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-dns-svc\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.030360 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-dns-svc\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.030441 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fsmg\" (UniqueName: \"kubernetes.io/projected/941bc276-a319-4746-9fc2-be30d07cde1f-kube-api-access-2fsmg\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.030464 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-config\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.031480 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-config\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.032440 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-dns-svc\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.092311 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fsmg\" (UniqueName: \"kubernetes.io/projected/941bc276-a319-4746-9fc2-be30d07cde1f-kube-api-access-2fsmg\") pod \"dnsmasq-dns-78d759bd75-hxcbk\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.124130 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78d759bd75-hxcbk"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.192738 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bb9bf987-q2zfm"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.224502 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bf86d7f99-djjb6"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.274432 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.280452 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bf86d7f99-djjb6"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.355708 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqlv9\" (UniqueName: \"kubernetes.io/projected/42f938ad-b496-4167-831d-ea2f3f865785-kube-api-access-qqlv9\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.360085 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-dns-svc\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.360460 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-config\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.466331 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-dns-svc\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.466479 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-config\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.466509 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqlv9\" (UniqueName: \"kubernetes.io/projected/42f938ad-b496-4167-831d-ea2f3f865785-kube-api-access-qqlv9\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.467508 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-dns-svc\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.467678 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-config\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.497841 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqlv9\" (UniqueName: \"kubernetes.io/projected/42f938ad-b496-4167-831d-ea2f3f865785-kube-api-access-qqlv9\") pod \"dnsmasq-dns-bf86d7f99-djjb6\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.631946 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78d759bd75-hxcbk"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.642483 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bf86d7f99-djjb6"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.664617 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86fbff885-v6wq7"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.666048 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.703312 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86fbff885-v6wq7"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.769887 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-dns-svc\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.770454 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmv9g\" (UniqueName: \"kubernetes.io/projected/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-kube-api-access-wmv9g\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.770530 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-config\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.871556 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmv9g\" (UniqueName: \"kubernetes.io/projected/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-kube-api-access-wmv9g\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.871636 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-config\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.871685 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-dns-svc\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.872852 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-config\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.872974 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-dns-svc\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.889521 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmv9g\" (UniqueName: \"kubernetes.io/projected/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-kube-api-access-wmv9g\") pod \"dnsmasq-dns-86fbff885-v6wq7\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.931307 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.932986 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.938424 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.938624 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.938799 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.939049 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.939203 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-w7khg"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.939364 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.940876 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.952439 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78d759bd75-hxcbk"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.967244 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 12:27:04 crc kubenswrapper[4711]: I1205 12:27:04.996566 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86fbff885-v6wq7"
Dec 05 12:27:05 crc kubenswrapper[4711]: W1205 12:27:05.008291 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod941bc276_a319_4746_9fc2_be30d07cde1f.slice/crio-dfad8c492224b0228920e03e2e76992a88100834f3ba616c241047e641a2f29f WatchSource:0}: Error finding container dfad8c492224b0228920e03e2e76992a88100834f3ba616c241047e641a2f29f: Status 404 returned error can't find the container with id dfad8c492224b0228920e03e2e76992a88100834f3ba616c241047e641a2f29f
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.074981 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.075046 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bff43887-0cb0-4da0-a16f-6264877c473e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.075093 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.075116 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.075170 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.075196 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.075255 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bff43887-0cb0-4da0-a16f-6264877c473e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.075316 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.075436 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.076199 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.076233 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz9lp\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-kube-api-access-zz9lp\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.088973 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d759bd75-hxcbk" event={"ID":"941bc276-a319-4746-9fc2-be30d07cde1f","Type":"ContainerStarted","Data":"dfad8c492224b0228920e03e2e76992a88100834f3ba616c241047e641a2f29f"}
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.179708 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.179763 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bff43887-0cb0-4da0-a16f-6264877c473e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.179812 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.179920 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.179950 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.180002 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz9lp\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-kube-api-access-zz9lp\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.180078 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.180136 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bff43887-0cb0-4da0-a16f-6264877c473e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.180168 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.180207 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.180253 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.180646 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.181137 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.181500 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.181950 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.183877 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.184297 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.188293 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bff43887-0cb0-4da0-a16f-6264877c473e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.189128 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bff43887-0cb0-4da0-a16f-6264877c473e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.191085 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.192614 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.208518 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz9lp\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-kube-api-access-zz9lp\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.232984 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.287124 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.314074 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bf86d7f99-djjb6"]
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.393940 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.395235 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.403184 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.403519 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.403697 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.418809 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.404752 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-g8wzk"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.404797 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.404858 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.404884 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.489969 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490029 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490065 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490174 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czcns\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-kube-api-access-czcns\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490249 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490329 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490359 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490423 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c5ef8db4-5a97-483c-a168-bcf6368849a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490570 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490612 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c5ef8db4-5a97-483c-a168-bcf6368849a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.490656 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-config-data\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.578663 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86fbff885-v6wq7"]
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592208 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592248 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592278 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c5ef8db4-5a97-483c-a168-bcf6368849a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592322 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592341 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c5ef8db4-5a97-483c-a168-bcf6368849a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592360 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-config-data\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592402 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592419 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592443 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592461 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czcns\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-kube-api-access-czcns\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.592485 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.593256 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.594795 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-config-data\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.595033 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.596758 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.597018 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.597869 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.598076 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.601418 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.604358 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c5ef8db4-5a97-483c-a168-bcf6368849a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.612496 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c5ef8db4-5a97-483c-a168-bcf6368849a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.617725 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czcns\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-kube-api-access-czcns\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.634531 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.740441 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.820129 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.821454 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.826724 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-erlang-cookie"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.826732 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-default-user"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.826915 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-config-data"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.827122 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-server-dockercfg-2fgjm"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.827122 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-server-conf"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.827156 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-notifications-svc"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.827203 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-plugins-conf"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.864104 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.888230 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.898796 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.898870 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.898915 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d1b49e15-30ab-4ef7-8980-436468104f7b-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.898964 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.898982 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.899042 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.899059 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.899089 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrl4w\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-kube-api-access-nrl4w\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.899143 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.899197 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: I1205 12:27:05.899210 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d1b49e15-30ab-4ef7-8980-436468104f7b-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:05 crc kubenswrapper[4711]: W1205 12:27:05.917180 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbff43887_0cb0_4da0_a16f_6264877c473e.slice/crio-510ca734e87fa52d76b2e6cefcc4ffb4a033bcb1aedee970c8fe0dd4422caede WatchSource:0}: Error finding container 510ca734e87fa52d76b2e6cefcc4ffb4a033bcb1aedee970c8fe0dd4422caede: Status 404 returned error can't find the container with id 510ca734e87fa52d76b2e6cefcc4ffb4a033bcb1aedee970c8fe0dd4422caede
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000412 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000459 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000520 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000541 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000588 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrl4w\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-kube-api-access-nrl4w\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000627 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000679 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000695 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d1b49e15-30ab-4ef7-8980-436468104f7b-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000797 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000842 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.000876 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d1b49e15-30ab-4ef7-8980-436468104f7b-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.002789 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.002790 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.003178 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.003278 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.008083 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.009250 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\"
(UniqueName: \"kubernetes.io/secret/d1b49e15-30ab-4ef7-8980-436468104f7b-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.012551 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.013295 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d1b49e15-30ab-4ef7-8980-436468104f7b-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.018010 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d1b49e15-30ab-4ef7-8980-436468104f7b-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.052929 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.067660 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrl4w\" (UniqueName: \"kubernetes.io/projected/d1b49e15-30ab-4ef7-8980-436468104f7b-kube-api-access-nrl4w\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.102575 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"d1b49e15-30ab-4ef7-8980-436468104f7b\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.165549 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.166654 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86fbff885-v6wq7" event={"ID":"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa","Type":"ContainerStarted","Data":"2146e563c3dffc7e1da7d98ee69aa7e3e1e0049d9fbba1b388fb5e751807ef0d"} Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.204731 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bff43887-0cb0-4da0-a16f-6264877c473e","Type":"ContainerStarted","Data":"510ca734e87fa52d76b2e6cefcc4ffb4a033bcb1aedee970c8fe0dd4422caede"} Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.212981 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf86d7f99-djjb6" event={"ID":"42f938ad-b496-4167-831d-ea2f3f865785","Type":"ContainerStarted","Data":"21cb03c743da6906ae58f5580b7c3dd2879e9257fd4fe97633ce21ec6a65626e"} Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.333517 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:27:06 crc kubenswrapper[4711]: I1205 12:27:06.946751 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.279706 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5ef8db4-5a97-483c-a168-bcf6368849a2","Type":"ContainerStarted","Data":"fca05599870c7dc542b1e55be001a2eae81916699792d388099d7deb20454cb3"} Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.281999 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"d1b49e15-30ab-4ef7-8980-436468104f7b","Type":"ContainerStarted","Data":"9ef2b40bedfb461c35083d1fe60da8246352e6a8c2abf321130523ac76968752"} Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.307477 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.309550 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.332447 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.333199 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.333440 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-npl2g" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.337665 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.340999 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.341614 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.351947 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-kolla-config\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.351991 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-config-data-default\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.352097 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/feb938be-1a43-402f-8373-47a6c9217d9c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.352151 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.352173 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/feb938be-1a43-402f-8373-47a6c9217d9c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.352221 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.352248 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-gck2p\" (UniqueName: \"kubernetes.io/projected/feb938be-1a43-402f-8373-47a6c9217d9c-kube-api-access-gck2p\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.352276 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb938be-1a43-402f-8373-47a6c9217d9c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459029 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/feb938be-1a43-402f-8373-47a6c9217d9c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459104 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459125 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/feb938be-1a43-402f-8373-47a6c9217d9c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459165 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459185 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gck2p\" (UniqueName: \"kubernetes.io/projected/feb938be-1a43-402f-8373-47a6c9217d9c-kube-api-access-gck2p\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459206 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb938be-1a43-402f-8373-47a6c9217d9c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459224 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-kolla-config\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459242 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-config-data-default\") pod \"openstack-galera-0\" (UID: 
\"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.459703 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/feb938be-1a43-402f-8373-47a6c9217d9c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.460333 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-config-data-default\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.460971 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.461575 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.462067 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/feb938be-1a43-402f-8373-47a6c9217d9c-kolla-config\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.479592 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gck2p\" (UniqueName: \"kubernetes.io/projected/feb938be-1a43-402f-8373-47a6c9217d9c-kube-api-access-gck2p\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.494588 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb938be-1a43-402f-8373-47a6c9217d9c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.508728 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/feb938be-1a43-402f-8373-47a6c9217d9c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.539281 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"feb938be-1a43-402f-8373-47a6c9217d9c\") " pod="openstack/openstack-galera-0" Dec 05 12:27:07 crc kubenswrapper[4711]: I1205 12:27:07.663150 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.505170 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 12:27:08 crc kubenswrapper[4711]: W1205 12:27:08.554225 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfeb938be_1a43_402f_8373_47a6c9217d9c.slice/crio-8d43c44db41e25197c3b8e88d856dfc07ecb34f2086b4b2495fa497924a28490 WatchSource:0}: Error finding container 8d43c44db41e25197c3b8e88d856dfc07ecb34f2086b4b2495fa497924a28490: Status 404 returned error can't find the container with id 8d43c44db41e25197c3b8e88d856dfc07ecb34f2086b4b2495fa497924a28490 Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.827063 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.828989 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.843214 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.843249 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.843727 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-pq58m" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.844287 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.915269 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.942494 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6768dcf2-c875-4467-9da9-3857b2fdb2e3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.942536 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.942574 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6768dcf2-c875-4467-9da9-3857b2fdb2e3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.942591 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " 
pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.942623 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stnfv\" (UniqueName: \"kubernetes.io/projected/6768dcf2-c875-4467-9da9-3857b2fdb2e3-kube-api-access-stnfv\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.942678 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6768dcf2-c875-4467-9da9-3857b2fdb2e3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.942723 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:08 crc kubenswrapper[4711]: I1205 12:27:08.942752 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.044575 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stnfv\" (UniqueName: \"kubernetes.io/projected/6768dcf2-c875-4467-9da9-3857b2fdb2e3-kube-api-access-stnfv\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.044667 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6768dcf2-c875-4467-9da9-3857b2fdb2e3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.044760 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.044881 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.045048 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6768dcf2-c875-4467-9da9-3857b2fdb2e3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc 
kubenswrapper[4711]: I1205 12:27:09.045072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.045106 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6768dcf2-c875-4467-9da9-3857b2fdb2e3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.045120 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.045136 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.048090 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.049015 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6768dcf2-c875-4467-9da9-3857b2fdb2e3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.050718 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.053512 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6768dcf2-c875-4467-9da9-3857b2fdb2e3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.084897 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6768dcf2-c875-4467-9da9-3857b2fdb2e3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.085053 4711 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6768dcf2-c875-4467-9da9-3857b2fdb2e3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.105248 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stnfv\" (UniqueName: \"kubernetes.io/projected/6768dcf2-c875-4467-9da9-3857b2fdb2e3-kube-api-access-stnfv\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.114446 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.115455 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.119647 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6768dcf2-c875-4467-9da9-3857b2fdb2e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.136710 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-8x2v4" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.136923 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.137002 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.137024 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.167940 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.248327 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3be4e2c7-9acc-4491-a349-0bc788db0e9e-kolla-config\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.248374 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3be4e2c7-9acc-4491-a349-0bc788db0e9e-config-data\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.248467 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvntc\" (UniqueName: \"kubernetes.io/projected/3be4e2c7-9acc-4491-a349-0bc788db0e9e-kube-api-access-qvntc\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.248524 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3be4e2c7-9acc-4491-a349-0bc788db0e9e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.248556 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3be4e2c7-9acc-4491-a349-0bc788db0e9e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.319335 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feb938be-1a43-402f-8373-47a6c9217d9c","Type":"ContainerStarted","Data":"8d43c44db41e25197c3b8e88d856dfc07ecb34f2086b4b2495fa497924a28490"} Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.350166 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3be4e2c7-9acc-4491-a349-0bc788db0e9e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.350234 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3be4e2c7-9acc-4491-a349-0bc788db0e9e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.350305 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3be4e2c7-9acc-4491-a349-0bc788db0e9e-kolla-config\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.350366 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/3be4e2c7-9acc-4491-a349-0bc788db0e9e-config-data\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.350444 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvntc\" (UniqueName: \"kubernetes.io/projected/3be4e2c7-9acc-4491-a349-0bc788db0e9e-kube-api-access-qvntc\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.352258 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3be4e2c7-9acc-4491-a349-0bc788db0e9e-config-data\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.352313 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3be4e2c7-9acc-4491-a349-0bc788db0e9e-kolla-config\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.381981 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvntc\" (UniqueName: \"kubernetes.io/projected/3be4e2c7-9acc-4491-a349-0bc788db0e9e-kube-api-access-qvntc\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.384019 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3be4e2c7-9acc-4491-a349-0bc788db0e9e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.384644 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3be4e2c7-9acc-4491-a349-0bc788db0e9e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3be4e2c7-9acc-4491-a349-0bc788db0e9e\") " pod="openstack/memcached-0" Dec 05 12:27:09 crc kubenswrapper[4711]: I1205 12:27:09.490923 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 05 12:27:10 crc kubenswrapper[4711]: I1205 12:27:10.129731 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 12:27:10 crc kubenswrapper[4711]: I1205 12:27:10.193825 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 12:27:10 crc kubenswrapper[4711]: W1205 12:27:10.228315 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6768dcf2_c875_4467_9da9_3857b2fdb2e3.slice/crio-894c46611769f22a669edf32f19d8b3c49f4acf4b0bc30dd69350ae7826d673c WatchSource:0}: Error finding container 894c46611769f22a669edf32f19d8b3c49f4acf4b0bc30dd69350ae7826d673c: Status 404 returned error can't find the container with id 894c46611769f22a669edf32f19d8b3c49f4acf4b0bc30dd69350ae7826d673c Dec 05 12:27:10 crc kubenswrapper[4711]: I1205 12:27:10.350993 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6768dcf2-c875-4467-9da9-3857b2fdb2e3","Type":"ContainerStarted","Data":"894c46611769f22a669edf32f19d8b3c49f4acf4b0bc30dd69350ae7826d673c"} Dec 05 12:27:10 crc kubenswrapper[4711]: I1205 12:27:10.353904 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"3be4e2c7-9acc-4491-a349-0bc788db0e9e","Type":"ContainerStarted","Data":"e80ebe9284d78db62a9d7bb1e75eefe646d45a259f34a3420046d0a20d948b32"} Dec 05 12:27:11 crc kubenswrapper[4711]: I1205 12:27:11.435157 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:27:11 crc kubenswrapper[4711]: I1205 12:27:11.441960 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:27:11 crc kubenswrapper[4711]: I1205 12:27:11.448750 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:27:11 crc kubenswrapper[4711]: I1205 12:27:11.452534 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-7w5hh" Dec 05 12:27:11 crc kubenswrapper[4711]: I1205 12:27:11.538151 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25g4q\" (UniqueName: \"kubernetes.io/projected/5959fa07-c68d-41dc-ba4a-e68360ec28d2-kube-api-access-25g4q\") pod \"kube-state-metrics-0\" (UID: \"5959fa07-c68d-41dc-ba4a-e68360ec28d2\") " pod="openstack/kube-state-metrics-0" Dec 05 12:27:11 crc kubenswrapper[4711]: I1205 12:27:11.645598 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25g4q\" (UniqueName: \"kubernetes.io/projected/5959fa07-c68d-41dc-ba4a-e68360ec28d2-kube-api-access-25g4q\") pod \"kube-state-metrics-0\" (UID: \"5959fa07-c68d-41dc-ba4a-e68360ec28d2\") " pod="openstack/kube-state-metrics-0" Dec 05 12:27:11 crc kubenswrapper[4711]: I1205 12:27:11.715701 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25g4q\" (UniqueName: \"kubernetes.io/projected/5959fa07-c68d-41dc-ba4a-e68360ec28d2-kube-api-access-25g4q\") pod \"kube-state-metrics-0\" (UID: \"5959fa07-c68d-41dc-ba4a-e68360ec28d2\") " pod="openstack/kube-state-metrics-0" Dec 05 12:27:11 crc kubenswrapper[4711]: I1205 12:27:11.786909 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.709688 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:27:12 crc kubenswrapper[4711]: W1205 12:27:12.711706 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5959fa07_c68d_41dc_ba4a_e68360ec28d2.slice/crio-c38f6105aa79104bff0016fc2ecbb7268bd73bd64256c8d8df946c2d1b5588b1 WatchSource:0}: Error finding container c38f6105aa79104bff0016fc2ecbb7268bd73bd64256c8d8df946c2d1b5588b1: Status 404 returned error can't find the container with id c38f6105aa79104bff0016fc2ecbb7268bd73bd64256c8d8df946c2d1b5588b1 Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.770002 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.771985 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.775961 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.780619 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.780878 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.781677 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.784214 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.802472 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-jq6ps" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.828756 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.884272 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a5dc5cdb-751f-4754-af33-2985c29b98a3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.884334 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.884549 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: 
\"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.884646 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.884694 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfghw\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-kube-api-access-tfghw\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.884826 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-config\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.884896 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.884941 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a5dc5cdb-751f-4754-af33-2985c29b98a3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.986999 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.987159 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.987204 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfghw\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-kube-api-access-tfghw\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.987236 4711 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-config\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.987269 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.987290 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a5dc5cdb-751f-4754-af33-2985c29b98a3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.987347 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a5dc5cdb-751f-4754-af33-2985c29b98a3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.987371 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.992853 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.993002 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a5dc5cdb-751f-4754-af33-2985c29b98a3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.993255 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:12 crc kubenswrapper[4711]: I1205 12:27:12.994208 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a5dc5cdb-751f-4754-af33-2985c29b98a3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.016943 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tfghw\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-kube-api-access-tfghw\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.024088 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.025606 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-config\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.025678 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.025719 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f9bd9950147f59c5b738a088845da7f6ecb4f45a3918a37eadb6099082c1159/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.088878 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.113811 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.509610 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5959fa07-c68d-41dc-ba4a-e68360ec28d2","Type":"ContainerStarted","Data":"c38f6105aa79104bff0016fc2ecbb7268bd73bd64256c8d8df946c2d1b5588b1"} Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.711488 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4wg9n"] Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.713020 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.718937 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.719271 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-949sh" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.719471 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.743109 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4wg9n"] Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.760626 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-28s5p"] Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.764500 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.793684 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-28s5p"] Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.802684 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-log-ovn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.802852 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7qhn\" (UniqueName: \"kubernetes.io/projected/13a40f45-a612-477e-b883-94012252a457-kube-api-access-n7qhn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.802877 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/13a40f45-a612-477e-b883-94012252a457-ovn-controller-tls-certs\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.802903 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-run-ovn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.802959 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-run\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.803012 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13a40f45-a612-477e-b883-94012252a457-scripts\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " 
pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.803038 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13a40f45-a612-477e-b883-94012252a457-combined-ca-bundle\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904592 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-run-ovn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904664 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-run\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904706 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-etc-ovs\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904738 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-lib\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904755 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13a40f45-a612-477e-b883-94012252a457-scripts\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904773 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-run\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904798 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13a40f45-a612-477e-b883-94012252a457-combined-ca-bundle\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904909 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmcst\" (UniqueName: \"kubernetes.io/projected/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-kube-api-access-kmcst\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904934 4711 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-log\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.904968 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-log-ovn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.905015 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-scripts\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.905047 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7qhn\" (UniqueName: \"kubernetes.io/projected/13a40f45-a612-477e-b883-94012252a457-kube-api-access-n7qhn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.905072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/13a40f45-a612-477e-b883-94012252a457-ovn-controller-tls-certs\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.905836 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-run\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.906133 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-log-ovn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.906148 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/13a40f45-a612-477e-b883-94012252a457-var-run-ovn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.908831 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13a40f45-a612-477e-b883-94012252a457-scripts\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.918736 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/13a40f45-a612-477e-b883-94012252a457-ovn-controller-tls-certs\") pod 
\"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.928114 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13a40f45-a612-477e-b883-94012252a457-combined-ca-bundle\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:13 crc kubenswrapper[4711]: I1205 12:27:13.931599 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7qhn\" (UniqueName: \"kubernetes.io/projected/13a40f45-a612-477e-b883-94012252a457-kube-api-access-n7qhn\") pod \"ovn-controller-4wg9n\" (UID: \"13a40f45-a612-477e-b883-94012252a457\") " pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.006918 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-scripts\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.007036 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-etc-ovs\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.007073 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-lib\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.007097 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-run\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.007177 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmcst\" (UniqueName: \"kubernetes.io/projected/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-kube-api-access-kmcst\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.007205 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-log\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.007497 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-log\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.010068 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-lib\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.010171 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-etc-ovs\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.010219 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-var-run\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.011259 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-scripts\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.033991 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmcst\" (UniqueName: \"kubernetes.io/projected/0a1cbd77-9586-4e37-a172-cfe7ecda6c72-kube-api-access-kmcst\") pod \"ovn-controller-ovs-28s5p\" (UID: \"0a1cbd77-9586-4e37-a172-cfe7ecda6c72\") " pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.056096 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.095690 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4wg9n" Dec 05 12:27:14 crc kubenswrapper[4711]: I1205 12:27:14.120747 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:27:16 crc kubenswrapper[4711]: I1205 12:27:16.936324 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 12:27:16 crc kubenswrapper[4711]: I1205 12:27:16.937999 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:16 crc kubenswrapper[4711]: I1205 12:27:16.942673 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 12:27:16 crc kubenswrapper[4711]: I1205 12:27:16.943014 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 12:27:16 crc kubenswrapper[4711]: I1205 12:27:16.943207 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 12:27:16 crc kubenswrapper[4711]: I1205 12:27:16.943412 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 12:27:16 crc kubenswrapper[4711]: I1205 12:27:16.949356 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-lfwz5" Dec 05 12:27:16 crc kubenswrapper[4711]: I1205 12:27:16.962459 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.043335 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.043433 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7twbb\" (UniqueName: \"kubernetes.io/projected/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-kube-api-access-7twbb\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.043469 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.043534 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.043594 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.043726 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.043865 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.043902 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-config\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.145458 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.145545 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7twbb\" (UniqueName: \"kubernetes.io/projected/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-kube-api-access-7twbb\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.145583 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.145632 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.145666 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.145706 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.145758 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.145782 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-config\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 
12:27:17.147097 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.147529 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.147574 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.148198 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-config\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.158314 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.167294 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.198599 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.198914 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7twbb\" (UniqueName: \"kubernetes.io/projected/2a1605ec-ad5c-4113-ac53-b8cf93bd5063-kube-api-access-7twbb\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.290430 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2a1605ec-ad5c-4113-ac53-b8cf93bd5063\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:17 crc kubenswrapper[4711]: I1205 12:27:17.570445 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.301480 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.301584 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.906542 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.907788 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.909669 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-6fmfc" Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.909922 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.910128 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.914805 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 12:27:18 crc kubenswrapper[4711]: I1205 12:27:18.924669 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.002130 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0" Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.002224 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0" Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.002265 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0" Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.002286 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0" Dec 05 12:27:19 crc 
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.002412 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d666928-c46a-4204-916c-231a43e82047-config\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.002699 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b77pg\" (UniqueName: \"kubernetes.io/projected/9d666928-c46a-4204-916c-231a43e82047-kube-api-access-b77pg\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.002813 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9d666928-c46a-4204-916c-231a43e82047-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.003028 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d666928-c46a-4204-916c-231a43e82047-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.104955 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d666928-c46a-4204-916c-231a43e82047-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.105027 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.105063 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.105103 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.105118 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.105141 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d666928-c46a-4204-916c-231a43e82047-config\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.105195 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b77pg\" (UniqueName: \"kubernetes.io/projected/9d666928-c46a-4204-916c-231a43e82047-kube-api-access-b77pg\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.105514 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9d666928-c46a-4204-916c-231a43e82047-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.105712 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.106368 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d666928-c46a-4204-916c-231a43e82047-config\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.106506 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9d666928-c46a-4204-916c-231a43e82047-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.106988 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d666928-c46a-4204-916c-231a43e82047-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.112331 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.114244 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.121041 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d666928-c46a-4204-916c-231a43e82047-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.138649 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.140036 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b77pg\" (UniqueName: \"kubernetes.io/projected/9d666928-c46a-4204-916c-231a43e82047-kube-api-access-b77pg\") pod \"ovsdbserver-sb-0\" (UID: \"9d666928-c46a-4204-916c-231a43e82047\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:19 crc kubenswrapper[4711]: I1205 12:27:19.236985 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 05 12:27:38 crc kubenswrapper[4711]: W1205 12:27:38.828633 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5dc5cdb_751f_4754_af33_2985c29b98a3.slice/crio-fb35dc6be59db173e175f0d1a931def1e676e6e3f3d1d837ee28ba7a83b60b10 WatchSource:0}: Error finding container fb35dc6be59db173e175f0d1a931def1e676e6e3f3d1d837ee28ba7a83b60b10: Status 404 returned error can't find the container with id fb35dc6be59db173e175f0d1a931def1e676e6e3f3d1d837ee28ba7a83b60b10
Dec 05 12:27:39 crc kubenswrapper[4711]: I1205 12:27:39.807912 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerStarted","Data":"fb35dc6be59db173e175f0d1a931def1e676e6e3f3d1d837ee28ba7a83b60b10"}
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.828299 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.829968 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.830231 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zz9lp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(bff43887-0cb0-4da0-a16f-6264877c473e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.831541 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="bff43887-0cb0-4da0-a16f-6264877c473e"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.839194 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.839246 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.839401 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-czcns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(c5ef8db4-5a97-483c-a168-bcf6368849a2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.840601 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.857748 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="bff43887-0cb0-4da0-a16f-6264877c473e"
Dec 05 12:27:45 crc kubenswrapper[4711]: E1205 12:27:45.857928 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest\\\"\"" pod="openstack/rabbitmq-server-0" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2"
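[annotation, not part of the captured log] The ErrImagePull entries above show pulls of openstack-rabbitmq:watcher_latest from the 38.102.83.20:5001 registry being canceled mid-copy ("copying config: context canceled"); after each failed pull the kubelet retries under ImagePullBackOff with increasing delay, which is why the same pods keep reappearing with "Back-off pulling image". A sketch for listing pods stuck this way, assuming the kubernetes Python client (the namespace is taken from the log):

    from kubernetes import client, config

    config.load_kube_config()
    core = client.CoreV1Api()

    for pod in core.list_namespaced_pod("openstack").items:
        statuses = (pod.status.init_container_statuses or []) + \
                   (pod.status.container_statuses or [])
        for st in statuses:
            waiting = st.state.waiting if st.state else None
            if waiting and waiting.reason in ("ErrImagePull", "ImagePullBackOff"):
                print(pod.metadata.name, st.name, waiting.reason)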
Dec 05 12:27:48 crc kubenswrapper[4711]: I1205 12:27:48.300477 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:27:48 crc kubenswrapper[4711]: I1205 12:27:48.300797 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:27:51 crc kubenswrapper[4711]: E1205 12:27:51.824144 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:27:51 crc kubenswrapper[4711]: E1205 12:27:51.824539 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:27:51 crc kubenswrapper[4711]: E1205 12:27:51.824674 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nrl4w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-notifications-server-0_openstack(d1b49e15-30ab-4ef7-8980-436468104f7b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:27:51 crc kubenswrapper[4711]: E1205 12:27:51.826293 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-notifications-server-0" podUID="d1b49e15-30ab-4ef7-8980-436468104f7b"
Dec 05 12:27:51 crc kubenswrapper[4711]: E1205 12:27:51.927993 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest\\\"\"" pod="openstack/rabbitmq-notifications-server-0" podUID="d1b49e15-30ab-4ef7-8980-436468104f7b"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.921373 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-mariadb:watcher_latest"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.921762 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-mariadb:watcher_latest"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.921881 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:38.102.83.20:5001/podified-master-centos10/openstack-mariadb:watcher_latest,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-stnfv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(6768dcf2-c875-4467-9da9-3857b2fdb2e3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.923028 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="6768dcf2-c875-4467-9da9-3857b2fdb2e3"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.952154 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-mariadb:watcher_latest\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="6768dcf2-c875-4467-9da9-3857b2fdb2e3"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.987804 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-mariadb:watcher_latest"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.987868 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-mariadb:watcher_latest"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.987999 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:38.102.83.20:5001/podified-master-centos10/openstack-mariadb:watcher_latest,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gck2p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(feb938be-1a43-402f-8373-47a6c9217d9c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:27:54 crc kubenswrapper[4711]: E1205 12:27:54.989314 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="feb938be-1a43-402f-8373-47a6c9217d9c"
Dec 05 12:27:55 crc kubenswrapper[4711]: E1205 12:27:55.961936 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-mariadb:watcher_latest\\\"\"" pod="openstack/openstack-galera-0" podUID="feb938be-1a43-402f-8373-47a6c9217d9c"
Dec 05 12:27:58 crc kubenswrapper[4711]: E1205 12:27:58.982193 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest"
Dec 05 12:27:58 crc kubenswrapper[4711]: E1205 12:27:58.984017 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config:
context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:27:58 crc kubenswrapper[4711]: E1205 12:27:58.984370 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qqlv9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-bf86d7f99-djjb6_openstack(42f938ad-b496-4167-831d-ea2f3f865785): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:27:58 crc kubenswrapper[4711]: E1205 12:27:58.986009 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-bf86d7f99-djjb6" podUID="42f938ad-b496-4167-831d-ea2f3f865785" Dec 05 12:27:59 crc kubenswrapper[4711]: E1205 12:27:59.256766 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:27:59 crc kubenswrapper[4711]: E1205 12:27:59.256844 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:27:59 crc kubenswrapper[4711]: E1205 12:27:59.257029 4711 kuberuntime_manager.go:1274] "Unhandled Error" 
err="init container &Container{Name:init,Image:38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5c7h56dh5cfh8bh54fhbbhf4h5b9hdch67fhd7h55fh55fh6ch9h548h54ch665h647h6h8fhd6h5dfh5cdh58bh577h66fh695h5fbh55h77h5fcq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wmv9g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-86fbff885-v6wq7_openstack(b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:27:59 crc kubenswrapper[4711]: E1205 12:27:59.258314 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-86fbff885-v6wq7" podUID="b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" Dec 05 12:27:59 crc kubenswrapper[4711]: I1205 12:27:59.331218 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4wg9n"] Dec 05 12:27:59 crc kubenswrapper[4711]: E1205 12:27:59.991539 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest\\\"\"" pod="openstack/dnsmasq-dns-86fbff885-v6wq7" podUID="b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" Dec 05 12:27:59 crc kubenswrapper[4711]: E1205 12:27:59.993702 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest\\\"\"" 
pod="openstack/dnsmasq-dns-bf86d7f99-djjb6" podUID="42f938ad-b496-4167-831d-ea2f3f865785" Dec 05 12:28:00 crc kubenswrapper[4711]: W1205 12:28:00.907159 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13a40f45_a612_477e_b883_94012252a457.slice/crio-04513a31bc633f3a2cb451f2d4485646037257655814e806e2614272fb5df7ae WatchSource:0}: Error finding container 04513a31bc633f3a2cb451f2d4485646037257655814e806e2614272fb5df7ae: Status 404 returned error can't find the container with id 04513a31bc633f3a2cb451f2d4485646037257655814e806e2614272fb5df7ae Dec 05 12:28:01 crc kubenswrapper[4711]: I1205 12:28:01.005772 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4wg9n" event={"ID":"13a40f45-a612-477e-b883-94012252a457","Type":"ContainerStarted","Data":"04513a31bc633f3a2cb451f2d4485646037257655814e806e2614272fb5df7ae"} Dec 05 12:28:01 crc kubenswrapper[4711]: E1205 12:28:01.063456 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:28:01 crc kubenswrapper[4711]: E1205 12:28:01.063504 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:28:01 crc kubenswrapper[4711]: E1205 12:28:01.063617 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rbztk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57b9d58665-d8xpl_openstack(34ca27ad-57c4-404a-b1f1-7590f648e9fa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:28:01 crc kubenswrapper[4711]: E1205 12:28:01.064962 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57b9d58665-d8xpl" podUID="34ca27ad-57c4-404a-b1f1-7590f648e9fa" Dec 05 12:28:01 crc kubenswrapper[4711]: E1205 12:28:01.271421 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:28:01 crc kubenswrapper[4711]: E1205 12:28:01.271484 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:28:01 crc kubenswrapper[4711]: E1205 12:28:01.271604 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2fsmg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78d759bd75-hxcbk_openstack(941bc276-a319-4746-9fc2-be30d07cde1f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:28:01 crc kubenswrapper[4711]: E1205 12:28:01.272810 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78d759bd75-hxcbk" podUID="941bc276-a319-4746-9fc2-be30d07cde1f" Dec 05 12:28:01 crc kubenswrapper[4711]: I1205 12:28:01.444143 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 12:28:01 crc kubenswrapper[4711]: W1205 12:28:01.452540 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a1605ec_ad5c_4113_ac53_b8cf93bd5063.slice/crio-318d77521980d1e0d14e666f08432f8b48f5af7f4c8df793852a00bd4f8432d4 WatchSource:0}: Error finding container 318d77521980d1e0d14e666f08432f8b48f5af7f4c8df793852a00bd4f8432d4: Status 404 returned error can't find the container with id 318d77521980d1e0d14e666f08432f8b48f5af7f4c8df793852a00bd4f8432d4 Dec 05 12:28:01 crc kubenswrapper[4711]: I1205 12:28:01.627578 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-28s5p"] Dec 05 12:28:01 crc kubenswrapper[4711]: W1205 12:28:01.628094 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a1cbd77_9586_4e37_a172_cfe7ecda6c72.slice/crio-30352db59a346be6a70658cdbf1f86496f44fa1ee22ddef82ab3a37c5fbcc826 WatchSource:0}: Error finding container 30352db59a346be6a70658cdbf1f86496f44fa1ee22ddef82ab3a37c5fbcc826: Status 404 returned error can't find the 
container with id 30352db59a346be6a70658cdbf1f86496f44fa1ee22ddef82ab3a37c5fbcc826 Dec 05 12:28:01 crc kubenswrapper[4711]: W1205 12:28:01.825824 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d666928_c46a_4204_916c_231a43e82047.slice/crio-5640d0b2ef8c32d96b145a50b0c4d29635946c56195f68327361fcf2441c050f WatchSource:0}: Error finding container 5640d0b2ef8c32d96b145a50b0c4d29635946c56195f68327361fcf2441c050f: Status 404 returned error can't find the container with id 5640d0b2ef8c32d96b145a50b0c4d29635946c56195f68327361fcf2441c050f Dec 05 12:28:01 crc kubenswrapper[4711]: I1205 12:28:01.826860 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.013599 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-28s5p" event={"ID":"0a1cbd77-9586-4e37-a172-cfe7ecda6c72","Type":"ContainerStarted","Data":"30352db59a346be6a70658cdbf1f86496f44fa1ee22ddef82ab3a37c5fbcc826"} Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.016264 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2a1605ec-ad5c-4113-ac53-b8cf93bd5063","Type":"ContainerStarted","Data":"318d77521980d1e0d14e666f08432f8b48f5af7f4c8df793852a00bd4f8432d4"} Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.017847 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9d666928-c46a-4204-916c-231a43e82047","Type":"ContainerStarted","Data":"5640d0b2ef8c32d96b145a50b0c4d29635946c56195f68327361fcf2441c050f"} Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.587179 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78d759bd75-hxcbk" Dec 05 12:28:02 crc kubenswrapper[4711]: E1205 12:28:02.615283 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:28:02 crc kubenswrapper[4711]: E1205 12:28:02.615353 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:28:02 crc kubenswrapper[4711]: E1205 12:28:02.615535 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-szdvb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7bb9bf987-q2zfm_openstack(b706c014-e61e-4b7a-946f-8f9a05a31dd7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:28:02 crc kubenswrapper[4711]: E1205 12:28:02.616784 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm" podUID="b706c014-e61e-4b7a-946f-8f9a05a31dd7" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.641635 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-2fsmg\" (UniqueName: \"kubernetes.io/projected/941bc276-a319-4746-9fc2-be30d07cde1f-kube-api-access-2fsmg\") pod \"941bc276-a319-4746-9fc2-be30d07cde1f\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.641786 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-dns-svc\") pod \"941bc276-a319-4746-9fc2-be30d07cde1f\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.641822 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-config\") pod \"941bc276-a319-4746-9fc2-be30d07cde1f\" (UID: \"941bc276-a319-4746-9fc2-be30d07cde1f\") " Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.642363 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-config" (OuterVolumeSpecName: "config") pod "941bc276-a319-4746-9fc2-be30d07cde1f" (UID: "941bc276-a319-4746-9fc2-be30d07cde1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.642416 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "941bc276-a319-4746-9fc2-be30d07cde1f" (UID: "941bc276-a319-4746-9fc2-be30d07cde1f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.704816 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/941bc276-a319-4746-9fc2-be30d07cde1f-kube-api-access-2fsmg" (OuterVolumeSpecName: "kube-api-access-2fsmg") pod "941bc276-a319-4746-9fc2-be30d07cde1f" (UID: "941bc276-a319-4746-9fc2-be30d07cde1f"). InnerVolumeSpecName "kube-api-access-2fsmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.743449 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.743482 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/941bc276-a319-4746-9fc2-be30d07cde1f-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.743497 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fsmg\" (UniqueName: \"kubernetes.io/projected/941bc276-a319-4746-9fc2-be30d07cde1f-kube-api-access-2fsmg\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.818940 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57b9d58665-d8xpl" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.846242 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbztk\" (UniqueName: \"kubernetes.io/projected/34ca27ad-57c4-404a-b1f1-7590f648e9fa-kube-api-access-rbztk\") pod \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\" (UID: \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\") " Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.846341 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ca27ad-57c4-404a-b1f1-7590f648e9fa-config\") pod \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\" (UID: \"34ca27ad-57c4-404a-b1f1-7590f648e9fa\") " Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.848208 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34ca27ad-57c4-404a-b1f1-7590f648e9fa-config" (OuterVolumeSpecName: "config") pod "34ca27ad-57c4-404a-b1f1-7590f648e9fa" (UID: "34ca27ad-57c4-404a-b1f1-7590f648e9fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.849411 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ca27ad-57c4-404a-b1f1-7590f648e9fa-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.858173 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34ca27ad-57c4-404a-b1f1-7590f648e9fa-kube-api-access-rbztk" (OuterVolumeSpecName: "kube-api-access-rbztk") pod "34ca27ad-57c4-404a-b1f1-7590f648e9fa" (UID: "34ca27ad-57c4-404a-b1f1-7590f648e9fa"). InnerVolumeSpecName "kube-api-access-rbztk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:02 crc kubenswrapper[4711]: I1205 12:28:02.950877 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbztk\" (UniqueName: \"kubernetes.io/projected/34ca27ad-57c4-404a-b1f1-7590f648e9fa-kube-api-access-rbztk\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.029807 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57b9d58665-d8xpl" event={"ID":"34ca27ad-57c4-404a-b1f1-7590f648e9fa","Type":"ContainerDied","Data":"ea455765d74effff0033a1a6b9f12f0e6d2e47bfd7c79576d03eed0d235e952e"} Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.029853 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57b9d58665-d8xpl" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.031115 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d759bd75-hxcbk" event={"ID":"941bc276-a319-4746-9fc2-be30d07cde1f","Type":"ContainerDied","Data":"dfad8c492224b0228920e03e2e76992a88100834f3ba616c241047e641a2f29f"} Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.031175 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78d759bd75-hxcbk" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.033602 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5ef8db4-5a97-483c-a168-bcf6368849a2","Type":"ContainerStarted","Data":"47f67c484cab171bc640a44e92ecd16dea282d80e2d48d886adc47eb22d914fe"} Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.035905 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"3be4e2c7-9acc-4491-a349-0bc788db0e9e","Type":"ContainerStarted","Data":"e2d7af5b53e47d83b00b6ccf7588865e9be4a4aa1766842117f217bc227e1934"} Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.036066 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.088275 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.9386532 podStartE2EDuration="54.088246107s" podCreationTimestamp="2025-12-05 12:27:09 +0000 UTC" firstStartedPulling="2025-12-05 12:27:10.165440018 +0000 UTC m=+1075.749762348" lastFinishedPulling="2025-12-05 12:28:01.315032925 +0000 UTC m=+1126.899355255" observedRunningTime="2025-12-05 12:28:03.081417209 +0000 UTC m=+1128.665739559" watchObservedRunningTime="2025-12-05 12:28:03.088246107 +0000 UTC m=+1128.672568437" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.177886 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57b9d58665-d8xpl"] Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.187544 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57b9d58665-d8xpl"] Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.224507 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78d759bd75-hxcbk"] Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.234730 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78d759bd75-hxcbk"] Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.395950 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.564762 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-dns-svc\") pod \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.564823 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szdvb\" (UniqueName: \"kubernetes.io/projected/b706c014-e61e-4b7a-946f-8f9a05a31dd7-kube-api-access-szdvb\") pod \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.564921 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-config\") pod \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\" (UID: \"b706c014-e61e-4b7a-946f-8f9a05a31dd7\") " Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.565567 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b706c014-e61e-4b7a-946f-8f9a05a31dd7" (UID: "b706c014-e61e-4b7a-946f-8f9a05a31dd7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.566025 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-config" (OuterVolumeSpecName: "config") pod "b706c014-e61e-4b7a-946f-8f9a05a31dd7" (UID: "b706c014-e61e-4b7a-946f-8f9a05a31dd7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.602203 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b706c014-e61e-4b7a-946f-8f9a05a31dd7-kube-api-access-szdvb" (OuterVolumeSpecName: "kube-api-access-szdvb") pod "b706c014-e61e-4b7a-946f-8f9a05a31dd7" (UID: "b706c014-e61e-4b7a-946f-8f9a05a31dd7"). InnerVolumeSpecName "kube-api-access-szdvb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.666479 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.666516 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szdvb\" (UniqueName: \"kubernetes.io/projected/b706c014-e61e-4b7a-946f-8f9a05a31dd7-kube-api-access-szdvb\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:03 crc kubenswrapper[4711]: I1205 12:28:03.666551 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b706c014-e61e-4b7a-946f-8f9a05a31dd7-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:04 crc kubenswrapper[4711]: I1205 12:28:04.046550 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm" event={"ID":"b706c014-e61e-4b7a-946f-8f9a05a31dd7","Type":"ContainerDied","Data":"98247649b213931c81a241cbdebd015d011098412af85fca8043919b0e525769"} Dec 05 12:28:04 crc kubenswrapper[4711]: I1205 12:28:04.046696 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bb9bf987-q2zfm" Dec 05 12:28:04 crc kubenswrapper[4711]: I1205 12:28:04.115626 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bb9bf987-q2zfm"] Dec 05 12:28:04 crc kubenswrapper[4711]: I1205 12:28:04.130202 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bb9bf987-q2zfm"] Dec 05 12:28:04 crc kubenswrapper[4711]: I1205 12:28:04.695443 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34ca27ad-57c4-404a-b1f1-7590f648e9fa" path="/var/lib/kubelet/pods/34ca27ad-57c4-404a-b1f1-7590f648e9fa/volumes" Dec 05 12:28:04 crc kubenswrapper[4711]: I1205 12:28:04.696026 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="941bc276-a319-4746-9fc2-be30d07cde1f" path="/var/lib/kubelet/pods/941bc276-a319-4746-9fc2-be30d07cde1f/volumes" Dec 05 12:28:04 crc kubenswrapper[4711]: I1205 12:28:04.696817 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b706c014-e61e-4b7a-946f-8f9a05a31dd7" path="/var/lib/kubelet/pods/b706c014-e61e-4b7a-946f-8f9a05a31dd7/volumes" Dec 05 12:28:05 crc kubenswrapper[4711]: I1205 12:28:05.054406 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerStarted","Data":"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194"} Dec 05 12:28:05 crc kubenswrapper[4711]: I1205 12:28:05.055936 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"d1b49e15-30ab-4ef7-8980-436468104f7b","Type":"ContainerStarted","Data":"f1eee60c73d3476b4b977ec3bf5f33cedee51a31851ffdb613d2bff848f30625"} Dec 05 12:28:05 crc kubenswrapper[4711]: I1205 12:28:05.057307 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bff43887-0cb0-4da0-a16f-6264877c473e","Type":"ContainerStarted","Data":"b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec"} Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.091728 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4wg9n" 
event={"ID":"13a40f45-a612-477e-b883-94012252a457","Type":"ContainerStarted","Data":"804f7220b71cbc4f8494a8735e94721fddddf5966c05c06bce7b37786c455e88"} Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.092235 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-4wg9n" Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.093647 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9d666928-c46a-4204-916c-231a43e82047","Type":"ContainerStarted","Data":"c37dea52963c82921ecb69fb6364f59b9ed4beada5f8cd1cee69d94769727ba3"} Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.097114 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5959fa07-c68d-41dc-ba4a-e68360ec28d2","Type":"ContainerStarted","Data":"badd9af53c97661086d8fd448889c0a4f1bb2ac5889f4182c69f6365348282c3"} Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.097330 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.099991 4711 generic.go:334] "Generic (PLEG): container finished" podID="0a1cbd77-9586-4e37-a172-cfe7ecda6c72" containerID="4f54f84f296c44116069c02eff6e9efddefe20298a05496b9d79ca4789dfa9d6" exitCode=0 Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.100042 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-28s5p" event={"ID":"0a1cbd77-9586-4e37-a172-cfe7ecda6c72","Type":"ContainerDied","Data":"4f54f84f296c44116069c02eff6e9efddefe20298a05496b9d79ca4789dfa9d6"} Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.105628 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6768dcf2-c875-4467-9da9-3857b2fdb2e3","Type":"ContainerStarted","Data":"e7d622f6c154881eefb9bcda895deb00e64356587e50311045e9f6e0cfc9a061"} Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.112856 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2a1605ec-ad5c-4113-ac53-b8cf93bd5063","Type":"ContainerStarted","Data":"ffb2773d991ba40c1fbd4dcabb7491ca37b1e117711bbc4c9734bf683786b50c"} Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.114286 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4wg9n" podStartSLOduration=50.062412542 podStartE2EDuration="56.114264388s" podCreationTimestamp="2025-12-05 12:27:13 +0000 UTC" firstStartedPulling="2025-12-05 12:28:01.20629442 +0000 UTC m=+1126.790616750" lastFinishedPulling="2025-12-05 12:28:07.258146266 +0000 UTC m=+1132.842468596" observedRunningTime="2025-12-05 12:28:09.108494925 +0000 UTC m=+1134.692817255" watchObservedRunningTime="2025-12-05 12:28:09.114264388 +0000 UTC m=+1134.698586718" Dec 05 12:28:09 crc kubenswrapper[4711]: I1205 12:28:09.129095 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.858755616 podStartE2EDuration="58.1290731s" podCreationTimestamp="2025-12-05 12:27:11 +0000 UTC" firstStartedPulling="2025-12-05 12:27:12.714895359 +0000 UTC m=+1078.299217689" lastFinishedPulling="2025-12-05 12:28:07.985212833 +0000 UTC m=+1133.569535173" observedRunningTime="2025-12-05 12:28:09.128531837 +0000 UTC m=+1134.712854187" watchObservedRunningTime="2025-12-05 12:28:09.1290731 +0000 UTC m=+1134.713395450" Dec 05 12:28:09 crc kubenswrapper[4711]: 
I1205 12:28:09.492684 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 12:28:10 crc kubenswrapper[4711]: I1205 12:28:10.130198 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-28s5p" event={"ID":"0a1cbd77-9586-4e37-a172-cfe7ecda6c72","Type":"ContainerStarted","Data":"5717d8c439e292c37ecbcbdfbe5f47272a031a6b52df8f30f0441ebb92f0c7eb"} Dec 05 12:28:10 crc kubenswrapper[4711]: I1205 12:28:10.130268 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-28s5p" event={"ID":"0a1cbd77-9586-4e37-a172-cfe7ecda6c72","Type":"ContainerStarted","Data":"a2affd897e21cd6275548780aa60da2ad29a6df1bc1dba6dca19ab986af68e6e"} Dec 05 12:28:10 crc kubenswrapper[4711]: I1205 12:28:10.131470 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:28:10 crc kubenswrapper[4711]: I1205 12:28:10.131510 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:28:10 crc kubenswrapper[4711]: I1205 12:28:10.708989 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-28s5p" podStartSLOduration=52.182185949 podStartE2EDuration="57.708969341s" podCreationTimestamp="2025-12-05 12:27:13 +0000 UTC" firstStartedPulling="2025-12-05 12:28:01.630025369 +0000 UTC m=+1127.214347699" lastFinishedPulling="2025-12-05 12:28:07.156808761 +0000 UTC m=+1132.741131091" observedRunningTime="2025-12-05 12:28:10.160659155 +0000 UTC m=+1135.744981495" watchObservedRunningTime="2025-12-05 12:28:10.708969341 +0000 UTC m=+1136.293291671" Dec 05 12:28:11 crc kubenswrapper[4711]: I1205 12:28:11.857694 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86fbff885-v6wq7"] Dec 05 12:28:11 crc kubenswrapper[4711]: I1205 12:28:11.916721 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc455bbff-m65tc"] Dec 05 12:28:11 crc kubenswrapper[4711]: I1205 12:28:11.918209 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:11 crc kubenswrapper[4711]: I1205 12:28:11.942818 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc455bbff-m65tc"] Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.045390 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-config\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.045551 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n27w8\" (UniqueName: \"kubernetes.io/projected/c3d58992-44b1-4f63-9574-795a557bcf96-kube-api-access-n27w8\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.045624 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-dns-svc\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.146750 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n27w8\" (UniqueName: \"kubernetes.io/projected/c3d58992-44b1-4f63-9574-795a557bcf96-kube-api-access-n27w8\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.146794 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-dns-svc\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.146929 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-config\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.147488 4711 generic.go:334] "Generic (PLEG): container finished" podID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerID="3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194" exitCode=0 Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.147723 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerDied","Data":"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194"} Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.147816 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-config\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc 
kubenswrapper[4711]: I1205 12:28:12.147892 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-dns-svc\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.169256 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n27w8\" (UniqueName: \"kubernetes.io/projected/c3d58992-44b1-4f63-9574-795a557bcf96-kube-api-access-n27w8\") pod \"dnsmasq-dns-6bc455bbff-m65tc\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.297747 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:12 crc kubenswrapper[4711]: I1205 12:28:12.789845 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc455bbff-m65tc"] Dec 05 12:28:12 crc kubenswrapper[4711]: W1205 12:28:12.800415 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3d58992_44b1_4f63_9574_795a557bcf96.slice/crio-0a28c41ec03e507a0d723e112b5e71d135b0db31dfcf14af5633beecdf2859ce WatchSource:0}: Error finding container 0a28c41ec03e507a0d723e112b5e71d135b0db31dfcf14af5633beecdf2859ce: Status 404 returned error can't find the container with id 0a28c41ec03e507a0d723e112b5e71d135b0db31dfcf14af5633beecdf2859ce Dec 05 12:28:12 crc kubenswrapper[4711]: E1205 12:28:12.962923 4711 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 05 12:28:12 crc kubenswrapper[4711]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/42f938ad-b496-4167-831d-ea2f3f865785/volume-subpaths/dns-svc/init/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 12:28:12 crc kubenswrapper[4711]: > podSandboxID="21cb03c743da6906ae58f5580b7c3dd2879e9257fd4fe97633ce21ec6a65626e" Dec 05 12:28:12 crc kubenswrapper[4711]: E1205 12:28:12.963196 4711 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 05 12:28:12 crc kubenswrapper[4711]: init container &Container{Name:init,Image:38.102.83.20:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qqlv9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-bf86d7f99-djjb6_openstack(42f938ad-b496-4167-831d-ea2f3f865785): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/42f938ad-b496-4167-831d-ea2f3f865785/volume-subpaths/dns-svc/init/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 12:28:12 crc kubenswrapper[4711]: > logger="UnhandledError" Dec 05 12:28:12 crc kubenswrapper[4711]: E1205 12:28:12.964467 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/42f938ad-b496-4167-831d-ea2f3f865785/volume-subpaths/dns-svc/init/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-bf86d7f99-djjb6" podUID="42f938ad-b496-4167-831d-ea2f3f865785" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.002074 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.007123 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.009591 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.009732 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.009808 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.009860 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-rxrvr" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.025316 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.061267 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwpz7\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-kube-api-access-fwpz7\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.061418 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.061474 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/421d5855-985f-4d7f-9faf-c868088a7291-lock\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.061500 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.061668 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/421d5855-985f-4d7f-9faf-c868088a7291-cache\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.157349 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feb938be-1a43-402f-8373-47a6c9217d9c","Type":"ContainerStarted","Data":"d26802cf75fd94bc8cfd7d324385f87453dcf90d0150b3e945560d5c88c93547"} Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.159025 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2a1605ec-ad5c-4113-ac53-b8cf93bd5063","Type":"ContainerStarted","Data":"a2f81b0d80852566179ec5ffe910ceaea2016e6fec436570153b503ea0e47a6c"} Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.162185 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"9d666928-c46a-4204-916c-231a43e82047","Type":"ContainerStarted","Data":"58dcfec0dbce5d71a8b621e6d7232f8e56aced4188d8dcf0aa518e68476c26c8"} Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.162686 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwpz7\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-kube-api-access-fwpz7\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.162769 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.162808 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/421d5855-985f-4d7f-9faf-c868088a7291-lock\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.162827 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.162848 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/421d5855-985f-4d7f-9faf-c868088a7291-cache\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.163341 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/421d5855-985f-4d7f-9faf-c868088a7291-cache\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.163411 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/421d5855-985f-4d7f-9faf-c868088a7291-lock\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: E1205 12:28:13.163432 4711 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:28:13 crc kubenswrapper[4711]: E1205 12:28:13.163449 4711 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:28:13 crc kubenswrapper[4711]: E1205 12:28:13.163493 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift podName:421d5855-985f-4d7f-9faf-c868088a7291 nodeName:}" failed. No retries permitted until 2025-12-05 12:28:13.663475987 +0000 UTC m=+1139.247798317 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift") pod "swift-storage-0" (UID: "421d5855-985f-4d7f-9faf-c868088a7291") : configmap "swift-ring-files" not found Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.163632 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.164452 4711 generic.go:334] "Generic (PLEG): container finished" podID="b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" containerID="7a3406259c98bd945bcfb262c301e585fd2135cbf964fccb6f5c04de02be8828" exitCode=0 Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.164513 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86fbff885-v6wq7" event={"ID":"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa","Type":"ContainerDied","Data":"7a3406259c98bd945bcfb262c301e585fd2135cbf964fccb6f5c04de02be8828"} Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.166507 4711 generic.go:334] "Generic (PLEG): container finished" podID="c3d58992-44b1-4f63-9574-795a557bcf96" containerID="52657d1a02bc3c3053e27fd05c5b9870468c34b394063a1613e2ae0168ec39aa" exitCode=0 Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.166540 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" event={"ID":"c3d58992-44b1-4f63-9574-795a557bcf96","Type":"ContainerDied","Data":"52657d1a02bc3c3053e27fd05c5b9870468c34b394063a1613e2ae0168ec39aa"} Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.166557 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" event={"ID":"c3d58992-44b1-4f63-9574-795a557bcf96","Type":"ContainerStarted","Data":"0a28c41ec03e507a0d723e112b5e71d135b0db31dfcf14af5633beecdf2859ce"} Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.187427 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwpz7\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-kube-api-access-fwpz7\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.191066 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.237430 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.261937 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=48.033902734 podStartE2EDuration="58.26191559s" podCreationTimestamp="2025-12-05 12:27:15 +0000 UTC" firstStartedPulling="2025-12-05 12:28:01.456938286 +0000 UTC m=+1127.041260616" lastFinishedPulling="2025-12-05 12:28:11.684951142 +0000 UTC m=+1137.269273472" observedRunningTime="2025-12-05 12:28:13.241159511 +0000 UTC m=+1138.825481831" watchObservedRunningTime="2025-12-05 12:28:13.26191559 +0000 UTC 
m=+1138.846237930" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.269764 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=46.36733135 podStartE2EDuration="56.269746702s" podCreationTimestamp="2025-12-05 12:27:17 +0000 UTC" firstStartedPulling="2025-12-05 12:28:01.827953573 +0000 UTC m=+1127.412275903" lastFinishedPulling="2025-12-05 12:28:11.730368925 +0000 UTC m=+1137.314691255" observedRunningTime="2025-12-05 12:28:13.261556972 +0000 UTC m=+1138.845879322" watchObservedRunningTime="2025-12-05 12:28:13.269746702 +0000 UTC m=+1138.854069052" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.302279 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.508436 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86fbff885-v6wq7" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.669234 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-dns-svc\") pod \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.669305 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-config\") pod \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.669413 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmv9g\" (UniqueName: \"kubernetes.io/projected/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-kube-api-access-wmv9g\") pod \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\" (UID: \"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa\") " Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.669823 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:13 crc kubenswrapper[4711]: E1205 12:28:13.670025 4711 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:28:13 crc kubenswrapper[4711]: E1205 12:28:13.670062 4711 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:28:13 crc kubenswrapper[4711]: E1205 12:28:13.670130 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift podName:421d5855-985f-4d7f-9faf-c868088a7291 nodeName:}" failed. No retries permitted until 2025-12-05 12:28:14.670109739 +0000 UTC m=+1140.254432069 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift") pod "swift-storage-0" (UID: "421d5855-985f-4d7f-9faf-c868088a7291") : configmap "swift-ring-files" not found Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.674456 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-kube-api-access-wmv9g" (OuterVolumeSpecName: "kube-api-access-wmv9g") pod "b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" (UID: "b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa"). InnerVolumeSpecName "kube-api-access-wmv9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.688294 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" (UID: "b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.695731 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-config" (OuterVolumeSpecName: "config") pod "b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" (UID: "b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.771578 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.771614 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:13 crc kubenswrapper[4711]: I1205 12:28:13.771627 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmv9g\" (UniqueName: \"kubernetes.io/projected/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa-kube-api-access-wmv9g\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.179608 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" event={"ID":"c3d58992-44b1-4f63-9574-795a557bcf96","Type":"ContainerStarted","Data":"c8cc2c4b5d7f7ccdf423d05c64982721203806226ed5cdd18dab411b3bb5f1c2"} Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.179800 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.181619 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86fbff885-v6wq7" event={"ID":"b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa","Type":"ContainerDied","Data":"2146e563c3dffc7e1da7d98ee69aa7e3e1e0049d9fbba1b388fb5e751807ef0d"} Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.181655 4711 util.go:48] "No ready sandbox for pod can be found. 
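Every etc-swift retry fails the same way: the projected volume sources configmap "swift-ring-files", which does not exist yet (presumably it is published by the swift-ring-rebalance job that appears further down). A quick client-go probe for the missing dependency, assuming in-cluster credentials; the namespace and object name are taken from the log:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	// The projected volume "etc-swift" cannot be set up until this object exists.
	_, err = cs.CoreV1().ConfigMaps("openstack").Get(
		context.TODO(), "swift-ring-files", metav1.GetOptions{})
	fmt.Println("swift-ring-files present:", err == nil)
}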
Need to start a new one" pod="openstack/dnsmasq-dns-86fbff885-v6wq7" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.181660 4711 scope.go:117] "RemoveContainer" containerID="7a3406259c98bd945bcfb262c301e585fd2135cbf964fccb6f5c04de02be8828" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.181994 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.230087 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.245604 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" podStartSLOduration=3.24558937 podStartE2EDuration="3.24558937s" podCreationTimestamp="2025-12-05 12:28:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:14.200860334 +0000 UTC m=+1139.785182704" watchObservedRunningTime="2025-12-05 12:28:14.24558937 +0000 UTC m=+1139.829911700" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.304040 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86fbff885-v6wq7"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.316571 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86fbff885-v6wq7"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.539471 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bf86d7f99-djjb6"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.573215 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.612610 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c7c495775-4v5r4"] Dec 05 12:28:14 crc kubenswrapper[4711]: E1205 12:28:14.613129 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" containerName="init" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.613152 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" containerName="init" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.613422 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" containerName="init" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.614669 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.621112 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.633874 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c7c495775-4v5r4"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.652892 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-5b8rk"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.654034 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.658131 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.662646 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-5b8rk"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.666076 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.697637 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:14 crc kubenswrapper[4711]: E1205 12:28:14.697845 4711 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:28:14 crc kubenswrapper[4711]: E1205 12:28:14.697862 4711 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:28:14 crc kubenswrapper[4711]: E1205 12:28:14.697906 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift podName:421d5855-985f-4d7f-9faf-c868088a7291 nodeName:}" failed. No retries permitted until 2025-12-05 12:28:16.697889721 +0000 UTC m=+1142.282212051 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift") pod "swift-storage-0" (UID: "421d5855-985f-4d7f-9faf-c868088a7291") : configmap "swift-ring-files" not found Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.707158 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa" path="/var/lib/kubelet/pods/b1fd4cf0-c722-48e3-96a9-0762e1dfa9aa/volumes" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801500 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-dns-svc\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801533 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh8s7\" (UniqueName: \"kubernetes.io/projected/c53fd197-4405-433e-ab39-a71e4248b5b1-kube-api-access-bh8s7\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801595 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjkls\" (UniqueName: \"kubernetes.io/projected/1939bf8a-af4b-40df-b6a3-390a44292cfb-kube-api-access-fjkls\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801630 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801664 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1939bf8a-af4b-40df-b6a3-390a44292cfb-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801700 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1939bf8a-af4b-40df-b6a3-390a44292cfb-ovs-rundir\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801727 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1939bf8a-af4b-40df-b6a3-390a44292cfb-ovn-rundir\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801757 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1939bf8a-af4b-40df-b6a3-390a44292cfb-config\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801931 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1939bf8a-af4b-40df-b6a3-390a44292cfb-combined-ca-bundle\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.801969 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-config\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.833883 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc455bbff-m65tc"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.886772 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55ffbfff47-b5jn7"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.888191 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.895520 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.898192 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55ffbfff47-b5jn7"] Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904188 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-dns-svc\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904230 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh8s7\" (UniqueName: \"kubernetes.io/projected/c53fd197-4405-433e-ab39-a71e4248b5b1-kube-api-access-bh8s7\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904258 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjkls\" (UniqueName: \"kubernetes.io/projected/1939bf8a-af4b-40df-b6a3-390a44292cfb-kube-api-access-fjkls\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904276 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904295 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1939bf8a-af4b-40df-b6a3-390a44292cfb-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904313 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1939bf8a-af4b-40df-b6a3-390a44292cfb-ovs-rundir\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904334 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1939bf8a-af4b-40df-b6a3-390a44292cfb-ovn-rundir\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904352 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1939bf8a-af4b-40df-b6a3-390a44292cfb-config\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 
12:28:14.904436 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1939bf8a-af4b-40df-b6a3-390a44292cfb-combined-ca-bundle\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.904461 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-config\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.905324 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-config\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.905926 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-dns-svc\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.906182 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1939bf8a-af4b-40df-b6a3-390a44292cfb-ovs-rundir\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.906240 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1939bf8a-af4b-40df-b6a3-390a44292cfb-ovn-rundir\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.908595 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.918853 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1939bf8a-af4b-40df-b6a3-390a44292cfb-config\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.927381 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1939bf8a-af4b-40df-b6a3-390a44292cfb-combined-ca-bundle\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.928111 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1939bf8a-af4b-40df-b6a3-390a44292cfb-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.948571 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh8s7\" (UniqueName: \"kubernetes.io/projected/c53fd197-4405-433e-ab39-a71e4248b5b1-kube-api-access-bh8s7\") pod \"dnsmasq-dns-6c7c495775-4v5r4\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") " pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.954791 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.982244 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjkls\" (UniqueName: \"kubernetes.io/projected/1939bf8a-af4b-40df-b6a3-390a44292cfb-kube-api-access-fjkls\") pod \"ovn-controller-metrics-5b8rk\" (UID: \"1939bf8a-af4b-40df-b6a3-390a44292cfb\") " pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:14 crc kubenswrapper[4711]: I1205 12:28:14.982681 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-5b8rk" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.010785 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-dns-svc\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.010940 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-config\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.011047 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlc68\" (UniqueName: \"kubernetes.io/projected/9db9755e-38c2-44d8-a728-424a4ff924a0-kube-api-access-vlc68\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.011116 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-sb\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.011213 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-nb\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.113298 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-config\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.113424 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlc68\" (UniqueName: \"kubernetes.io/projected/9db9755e-38c2-44d8-a728-424a4ff924a0-kube-api-access-vlc68\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.113452 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-sb\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.113511 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-nb\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.113542 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-dns-svc\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.120031 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.150136 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bf86d7f99-djjb6" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.151484 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-config\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.152163 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-sb\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.153264 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlc68\" (UniqueName: \"kubernetes.io/projected/9db9755e-38c2-44d8-a728-424a4ff924a0-kube-api-access-vlc68\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.162311 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-nb\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.165456 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-dns-svc\") pod \"dnsmasq-dns-55ffbfff47-b5jn7\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.195230 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf86d7f99-djjb6" event={"ID":"42f938ad-b496-4167-831d-ea2f3f865785","Type":"ContainerDied","Data":"21cb03c743da6906ae58f5580b7c3dd2879e9257fd4fe97633ce21ec6a65626e"} Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.196613 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bf86d7f99-djjb6" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.200759 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.263234 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.317354 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-dns-svc\") pod \"42f938ad-b496-4167-831d-ea2f3f865785\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.317729 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-config\") pod \"42f938ad-b496-4167-831d-ea2f3f865785\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.318215 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqlv9\" (UniqueName: \"kubernetes.io/projected/42f938ad-b496-4167-831d-ea2f3f865785-kube-api-access-qqlv9\") pod \"42f938ad-b496-4167-831d-ea2f3f865785\" (UID: \"42f938ad-b496-4167-831d-ea2f3f865785\") " Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.327222 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42f938ad-b496-4167-831d-ea2f3f865785-kube-api-access-qqlv9" (OuterVolumeSpecName: "kube-api-access-qqlv9") pod "42f938ad-b496-4167-831d-ea2f3f865785" (UID: "42f938ad-b496-4167-831d-ea2f3f865785"). InnerVolumeSpecName "kube-api-access-qqlv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.351919 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "42f938ad-b496-4167-831d-ea2f3f865785" (UID: "42f938ad-b496-4167-831d-ea2f3f865785"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.361104 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-config" (OuterVolumeSpecName: "config") pod "42f938ad-b496-4167-831d-ea2f3f865785" (UID: "42f938ad-b496-4167-831d-ea2f3f865785"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.420892 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.420939 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqlv9\" (UniqueName: \"kubernetes.io/projected/42f938ad-b496-4167-831d-ea2f3f865785-kube-api-access-qqlv9\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.420955 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42f938ad-b496-4167-831d-ea2f3f865785-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.452123 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.545406 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.550536 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.554455 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-gc282" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.554476 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.554673 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.554741 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.602335 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.610200 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bf86d7f99-djjb6"] Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.626855 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.626949 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2330b767-24ec-4c55-9458-73ff85a96bc7-scripts\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.627012 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qmfs\" (UniqueName: \"kubernetes.io/projected/2330b767-24ec-4c55-9458-73ff85a96bc7-kube-api-access-9qmfs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.627049 4711 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2330b767-24ec-4c55-9458-73ff85a96bc7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.627110 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.627153 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2330b767-24ec-4c55-9458-73ff85a96bc7-config\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.627208 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.702867 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bf86d7f99-djjb6"] Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.734367 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.734724 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.734785 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2330b767-24ec-4c55-9458-73ff85a96bc7-scripts\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.734840 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qmfs\" (UniqueName: \"kubernetes.io/projected/2330b767-24ec-4c55-9458-73ff85a96bc7-kube-api-access-9qmfs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.734868 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2330b767-24ec-4c55-9458-73ff85a96bc7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.734927 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.734968 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2330b767-24ec-4c55-9458-73ff85a96bc7-config\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.736134 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2330b767-24ec-4c55-9458-73ff85a96bc7-config\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.736686 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2330b767-24ec-4c55-9458-73ff85a96bc7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.740123 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2330b767-24ec-4c55-9458-73ff85a96bc7-scripts\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.742348 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.742349 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.742502 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-5b8rk"] Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.748893 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/2330b767-24ec-4c55-9458-73ff85a96bc7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.753686 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qmfs\" (UniqueName: \"kubernetes.io/projected/2330b767-24ec-4c55-9458-73ff85a96bc7-kube-api-access-9qmfs\") pod \"ovn-northd-0\" (UID: \"2330b767-24ec-4c55-9458-73ff85a96bc7\") " pod="openstack/ovn-northd-0" Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.904302 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c7c495775-4v5r4"] Dec 05 12:28:15 crc kubenswrapper[4711]: I1205 12:28:15.936466 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.071945 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55ffbfff47-b5jn7"] Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.220564 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" event={"ID":"c53fd197-4405-433e-ab39-a71e4248b5b1","Type":"ContainerStarted","Data":"8a4b8cbc9a72e07fc45c94397310965d2b4116deb2d5a50682f6ed7e3a917689"} Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.226319 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" event={"ID":"9db9755e-38c2-44d8-a728-424a4ff924a0","Type":"ContainerStarted","Data":"fa2330e371540a6bf2b84370b6cd2124b33edf2efa97582aaa2c5763fffe346f"} Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.230594 4711 generic.go:334] "Generic (PLEG): container finished" podID="6768dcf2-c875-4467-9da9-3857b2fdb2e3" containerID="e7d622f6c154881eefb9bcda895deb00e64356587e50311045e9f6e0cfc9a061" exitCode=0 Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.230685 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6768dcf2-c875-4467-9da9-3857b2fdb2e3","Type":"ContainerDied","Data":"e7d622f6c154881eefb9bcda895deb00e64356587e50311045e9f6e0cfc9a061"} Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.241000 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-5b8rk" event={"ID":"1939bf8a-af4b-40df-b6a3-390a44292cfb","Type":"ContainerStarted","Data":"36a23c8a340b21717149eb066f9ec1317fb528bc063087fc540b822319455723"} Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.241051 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-5b8rk" event={"ID":"1939bf8a-af4b-40df-b6a3-390a44292cfb","Type":"ContainerStarted","Data":"19570c235317bba13215372b02a594f20196be89822eed0063b176ccfd7a863f"} Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.241240 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" podUID="c3d58992-44b1-4f63-9574-795a557bcf96" containerName="dnsmasq-dns" containerID="cri-o://c8cc2c4b5d7f7ccdf423d05c64982721203806226ed5cdd18dab411b3bb5f1c2" gracePeriod=10 Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.448853 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 12:28:16 crc kubenswrapper[4711]: W1205 12:28:16.453958 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2330b767_24ec_4c55_9458_73ff85a96bc7.slice/crio-e831bfffb46b05de7cf93dfe857e48ee8fb011eab1b5e36c1d712d8104b505a0 WatchSource:0}: Error finding container e831bfffb46b05de7cf93dfe857e48ee8fb011eab1b5e36c1d712d8104b505a0: Status 404 returned error can't find the container with id e831bfffb46b05de7cf93dfe857e48ee8fb011eab1b5e36c1d712d8104b505a0 Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.700491 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42f938ad-b496-4167-831d-ea2f3f865785" path="/var/lib/kubelet/pods/42f938ad-b496-4167-831d-ea2f3f865785/volumes" Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.761665 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:16 crc kubenswrapper[4711]: E1205 12:28:16.762708 4711 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:28:16 crc kubenswrapper[4711]: E1205 12:28:16.762742 4711 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:28:16 crc kubenswrapper[4711]: E1205 12:28:16.762816 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift podName:421d5855-985f-4d7f-9faf-c868088a7291 nodeName:}" failed. No retries permitted until 2025-12-05 12:28:20.762789594 +0000 UTC m=+1146.347111994 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift") pod "swift-storage-0" (UID: "421d5855-985f-4d7f-9faf-c868088a7291") : configmap "swift-ring-files" not found Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.976514 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-56jbm"] Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.978260 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.983702 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.984346 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.984532 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 12:28:16 crc kubenswrapper[4711]: I1205 12:28:16.994939 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-56jbm"] Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.035009 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-2jbtw"] Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.036161 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.057542 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-56jbm"] Dec 05 12:28:17 crc kubenswrapper[4711]: E1205 12:28:17.058082 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-ddz77 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-56jbm" podUID="388dff95-b331-4033-95db-e59176370ffd" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.065988 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-2jbtw"] Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.066187 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddz77\" (UniqueName: \"kubernetes.io/projected/388dff95-b331-4033-95db-e59176370ffd-kube-api-access-ddz77\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.066259 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/388dff95-b331-4033-95db-e59176370ffd-etc-swift\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.066343 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-scripts\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.066499 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-swiftconf\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.066690 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-dispersionconf\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.066853 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-combined-ca-bundle\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.066887 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-ring-data-devices\") pod \"swift-ring-rebalance-56jbm\" (UID: 
\"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.169673 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-swiftconf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.169792 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-combined-ca-bundle\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.169826 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-ring-data-devices\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.169886 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddz77\" (UniqueName: \"kubernetes.io/projected/388dff95-b331-4033-95db-e59176370ffd-kube-api-access-ddz77\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.169930 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdpvf\" (UniqueName: \"kubernetes.io/projected/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-kube-api-access-zdpvf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.169969 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/388dff95-b331-4033-95db-e59176370ffd-etc-swift\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.169994 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-scripts\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.170046 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-etc-swift\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.170072 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-ring-data-devices\") pod \"swift-ring-rebalance-2jbtw\" (UID: 
\"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.170165 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-scripts\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.170231 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-swiftconf\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.170323 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-dispersionconf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.170365 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-dispersionconf\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.170494 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-combined-ca-bundle\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.170783 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/388dff95-b331-4033-95db-e59176370ffd-etc-swift\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.171055 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-ring-data-devices\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.172124 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-scripts\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.181872 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-combined-ca-bundle\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 
crc kubenswrapper[4711]: I1205 12:28:17.181874 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-swiftconf\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.182462 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-dispersionconf\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.188038 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddz77\" (UniqueName: \"kubernetes.io/projected/388dff95-b331-4033-95db-e59176370ffd-kube-api-access-ddz77\") pod \"swift-ring-rebalance-56jbm\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.250101 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"2330b767-24ec-4c55-9458-73ff85a96bc7","Type":"ContainerStarted","Data":"e831bfffb46b05de7cf93dfe857e48ee8fb011eab1b5e36c1d712d8104b505a0"} Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.250191 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.270186 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.271698 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdpvf\" (UniqueName: \"kubernetes.io/projected/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-kube-api-access-zdpvf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.271806 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-etc-swift\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.271829 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-ring-data-devices\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.271857 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-scripts\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.271898 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: 
\"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-dispersionconf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.271930 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-combined-ca-bundle\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.271949 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-swiftconf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.272429 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-etc-swift\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.272847 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-scripts\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.275072 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-ring-data-devices\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.276404 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-dispersionconf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.276380 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-combined-ca-bundle\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.276972 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-swiftconf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.288204 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdpvf\" (UniqueName: \"kubernetes.io/projected/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-kube-api-access-zdpvf\") pod \"swift-ring-rebalance-2jbtw\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") " 
pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.356226 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-rxrvr" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.364709 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2jbtw" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.376408 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-dispersionconf\") pod \"388dff95-b331-4033-95db-e59176370ffd\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.376496 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddz77\" (UniqueName: \"kubernetes.io/projected/388dff95-b331-4033-95db-e59176370ffd-kube-api-access-ddz77\") pod \"388dff95-b331-4033-95db-e59176370ffd\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.376615 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/388dff95-b331-4033-95db-e59176370ffd-etc-swift\") pod \"388dff95-b331-4033-95db-e59176370ffd\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.376713 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-scripts\") pod \"388dff95-b331-4033-95db-e59176370ffd\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.376777 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-swiftconf\") pod \"388dff95-b331-4033-95db-e59176370ffd\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.376848 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-ring-data-devices\") pod \"388dff95-b331-4033-95db-e59176370ffd\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.376869 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-combined-ca-bundle\") pod \"388dff95-b331-4033-95db-e59176370ffd\" (UID: \"388dff95-b331-4033-95db-e59176370ffd\") " Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.377570 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/388dff95-b331-4033-95db-e59176370ffd-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "388dff95-b331-4033-95db-e59176370ffd" (UID: "388dff95-b331-4033-95db-e59176370ffd"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.377845 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-scripts" (OuterVolumeSpecName: "scripts") pod "388dff95-b331-4033-95db-e59176370ffd" (UID: "388dff95-b331-4033-95db-e59176370ffd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.379177 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "388dff95-b331-4033-95db-e59176370ffd" (UID: "388dff95-b331-4033-95db-e59176370ffd"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.380764 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/388dff95-b331-4033-95db-e59176370ffd-kube-api-access-ddz77" (OuterVolumeSpecName: "kube-api-access-ddz77") pod "388dff95-b331-4033-95db-e59176370ffd" (UID: "388dff95-b331-4033-95db-e59176370ffd"). InnerVolumeSpecName "kube-api-access-ddz77". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.380811 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "388dff95-b331-4033-95db-e59176370ffd" (UID: "388dff95-b331-4033-95db-e59176370ffd"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.383196 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "388dff95-b331-4033-95db-e59176370ffd" (UID: "388dff95-b331-4033-95db-e59176370ffd"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.385644 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "388dff95-b331-4033-95db-e59176370ffd" (UID: "388dff95-b331-4033-95db-e59176370ffd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.478670 4711 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/388dff95-b331-4033-95db-e59176370ffd-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.478747 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.478762 4711 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.478774 4711 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/388dff95-b331-4033-95db-e59176370ffd-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.478789 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.478798 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddz77\" (UniqueName: \"kubernetes.io/projected/388dff95-b331-4033-95db-e59176370ffd-kube-api-access-ddz77\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.478808 4711 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/388dff95-b331-4033-95db-e59176370ffd-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:17 crc kubenswrapper[4711]: I1205 12:28:17.891637 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-2jbtw"] Dec 05 12:28:17 crc kubenswrapper[4711]: W1205 12:28:17.893105 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64b58c93_4cf7_4623_ac23_b01d1fb62fa4.slice/crio-40afcb441ec983a6cf079186dc08418e5a33616cdca7f0135bf44082b721f8ab WatchSource:0}: Error finding container 40afcb441ec983a6cf079186dc08418e5a33616cdca7f0135bf44082b721f8ab: Status 404 returned error can't find the container with id 40afcb441ec983a6cf079186dc08418e5a33616cdca7f0135bf44082b721f8ab Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.259836 4711 generic.go:334] "Generic (PLEG): container finished" podID="c3d58992-44b1-4f63-9574-795a557bcf96" containerID="c8cc2c4b5d7f7ccdf423d05c64982721203806226ed5cdd18dab411b3bb5f1c2" exitCode=0 Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.259927 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" event={"ID":"c3d58992-44b1-4f63-9574-795a557bcf96","Type":"ContainerDied","Data":"c8cc2c4b5d7f7ccdf423d05c64982721203806226ed5cdd18dab411b3bb5f1c2"} Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.261570 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2jbtw" event={"ID":"64b58c93-4cf7-4623-ac23-b01d1fb62fa4","Type":"ContainerStarted","Data":"40afcb441ec983a6cf079186dc08418e5a33616cdca7f0135bf44082b721f8ab"} Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 
12:28:18.261632 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-56jbm" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.285918 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-5b8rk" podStartSLOduration=4.285903752 podStartE2EDuration="4.285903752s" podCreationTimestamp="2025-12-05 12:28:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:18.283271207 +0000 UTC m=+1143.867593537" watchObservedRunningTime="2025-12-05 12:28:18.285903752 +0000 UTC m=+1143.870226082" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.301424 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.301496 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.301556 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.302268 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"165f762379e5d3244fd9c1d378fa831957f0f903eab36ae386a24b293865ce1b"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.302346 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://165f762379e5d3244fd9c1d378fa831957f0f903eab36ae386a24b293865ce1b" gracePeriod=600 Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.331464 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-56jbm"] Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.339335 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-56jbm"] Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.561490 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.702450 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n27w8\" (UniqueName: \"kubernetes.io/projected/c3d58992-44b1-4f63-9574-795a557bcf96-kube-api-access-n27w8\") pod \"c3d58992-44b1-4f63-9574-795a557bcf96\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.702510 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-dns-svc\") pod \"c3d58992-44b1-4f63-9574-795a557bcf96\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.702667 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-config\") pod \"c3d58992-44b1-4f63-9574-795a557bcf96\" (UID: \"c3d58992-44b1-4f63-9574-795a557bcf96\") " Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.707444 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="388dff95-b331-4033-95db-e59176370ffd" path="/var/lib/kubelet/pods/388dff95-b331-4033-95db-e59176370ffd/volumes" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.710181 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3d58992-44b1-4f63-9574-795a557bcf96-kube-api-access-n27w8" (OuterVolumeSpecName: "kube-api-access-n27w8") pod "c3d58992-44b1-4f63-9574-795a557bcf96" (UID: "c3d58992-44b1-4f63-9574-795a557bcf96"). InnerVolumeSpecName "kube-api-access-n27w8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.756461 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-config" (OuterVolumeSpecName: "config") pod "c3d58992-44b1-4f63-9574-795a557bcf96" (UID: "c3d58992-44b1-4f63-9574-795a557bcf96"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.777045 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c3d58992-44b1-4f63-9574-795a557bcf96" (UID: "c3d58992-44b1-4f63-9574-795a557bcf96"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.805120 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.805154 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n27w8\" (UniqueName: \"kubernetes.io/projected/c3d58992-44b1-4f63-9574-795a557bcf96-kube-api-access-n27w8\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:18 crc kubenswrapper[4711]: I1205 12:28:18.805163 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3d58992-44b1-4f63-9574-795a557bcf96-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.269948 4711 generic.go:334] "Generic (PLEG): container finished" podID="9db9755e-38c2-44d8-a728-424a4ff924a0" containerID="f36e03dd25124e83e8662993d7d082575dc8b6cab22d822f5db544bebd18346b" exitCode=0 Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.270367 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" event={"ID":"9db9755e-38c2-44d8-a728-424a4ff924a0","Type":"ContainerDied","Data":"f36e03dd25124e83e8662993d7d082575dc8b6cab22d822f5db544bebd18346b"} Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.273884 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6768dcf2-c875-4467-9da9-3857b2fdb2e3","Type":"ContainerStarted","Data":"4a2e8bfb19c0bda4e214c79932bfd2b1e25992f9eaaac3f6cd17bf3110967b03"} Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.279331 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.279525 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc455bbff-m65tc" event={"ID":"c3d58992-44b1-4f63-9574-795a557bcf96","Type":"ContainerDied","Data":"0a28c41ec03e507a0d723e112b5e71d135b0db31dfcf14af5633beecdf2859ce"} Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.279586 4711 scope.go:117] "RemoveContainer" containerID="c8cc2c4b5d7f7ccdf423d05c64982721203806226ed5cdd18dab411b3bb5f1c2" Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.283196 4711 generic.go:334] "Generic (PLEG): container finished" podID="c53fd197-4405-433e-ab39-a71e4248b5b1" containerID="f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb" exitCode=0 Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.283263 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" event={"ID":"c53fd197-4405-433e-ab39-a71e4248b5b1","Type":"ContainerDied","Data":"f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb"} Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.289312 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="165f762379e5d3244fd9c1d378fa831957f0f903eab36ae386a24b293865ce1b" exitCode=0 Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.289358 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"165f762379e5d3244fd9c1d378fa831957f0f903eab36ae386a24b293865ce1b"} Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.345249 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=13.811824307 podStartE2EDuration="1m12.345232098s" podCreationTimestamp="2025-12-05 12:27:07 +0000 UTC" firstStartedPulling="2025-12-05 12:27:10.243923174 +0000 UTC m=+1075.828245504" lastFinishedPulling="2025-12-05 12:28:08.777330965 +0000 UTC m=+1134.361653295" observedRunningTime="2025-12-05 12:28:19.34494918 +0000 UTC m=+1144.929271530" watchObservedRunningTime="2025-12-05 12:28:19.345232098 +0000 UTC m=+1144.929554428" Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.378286 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc455bbff-m65tc"] Dec 05 12:28:19 crc kubenswrapper[4711]: I1205 12:28:19.385511 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc455bbff-m65tc"] Dec 05 12:28:20 crc kubenswrapper[4711]: I1205 12:28:20.698102 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3d58992-44b1-4f63-9574-795a557bcf96" path="/var/lib/kubelet/pods/c3d58992-44b1-4f63-9574-795a557bcf96/volumes" Dec 05 12:28:20 crc kubenswrapper[4711]: I1205 12:28:20.853062 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:20 crc kubenswrapper[4711]: E1205 12:28:20.853330 4711 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:28:20 crc kubenswrapper[4711]: E1205 12:28:20.853347 4711 projected.go:194] Error preparing 
data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:28:20 crc kubenswrapper[4711]: E1205 12:28:20.853415 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift podName:421d5855-985f-4d7f-9faf-c868088a7291 nodeName:}" failed. No retries permitted until 2025-12-05 12:28:28.853380488 +0000 UTC m=+1154.437702818 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift") pod "swift-storage-0" (UID: "421d5855-985f-4d7f-9faf-c868088a7291") : configmap "swift-ring-files" not found Dec 05 12:28:21 crc kubenswrapper[4711]: I1205 12:28:21.304163 4711 generic.go:334] "Generic (PLEG): container finished" podID="feb938be-1a43-402f-8373-47a6c9217d9c" containerID="d26802cf75fd94bc8cfd7d324385f87453dcf90d0150b3e945560d5c88c93547" exitCode=0 Dec 05 12:28:21 crc kubenswrapper[4711]: I1205 12:28:21.304213 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feb938be-1a43-402f-8373-47a6c9217d9c","Type":"ContainerDied","Data":"d26802cf75fd94bc8cfd7d324385f87453dcf90d0150b3e945560d5c88c93547"} Dec 05 12:28:21 crc kubenswrapper[4711]: I1205 12:28:21.790489 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 12:28:28 crc kubenswrapper[4711]: E1205 12:28:28.880373 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-ovn-northd:watcher_latest" Dec 05 12:28:28 crc kubenswrapper[4711]: E1205 12:28:28.880975 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-ovn-northd:watcher_latest" Dec 05 12:28:28 crc kubenswrapper[4711]: E1205 12:28:28.881159 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-northd,Image:38.102.83.20:5001/podified-master-centos10/openstack-ovn-northd:watcher_latest,Command:[/usr/bin/ovn-northd],Args:[-vfile:off -vconsole:info --n-threads=1 --ovnnb-db=ssl:ovsdbserver-nb-0.openstack.svc.cluster.local:6641 --ovnsb-db=ssl:ovsdbserver-sb-0.openstack.svc.cluster.local:6642 --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key 
--ca-cert=/etc/pki/tls/certs/ovndbca.crt],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n66dh65ch647h64fh5bdh57h647hdbhcch57h676h594h66h585h59bh57h584h577h59ch79hfh8dh5d6h55dhc7h5cch57ch5f7h66dh685h8chbbq,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:certs,Value:n76h548h648h98hd6h5fh5bfhf9h574h545h76hf7h566h76h5d9h567hd5h564h57h66bh696h678h54ch5cdh67dh54ch578h7bh695h675hb6h88q,ValueFrom:nil,},EnvVar{Name:certs_metrics,Value:n577hf4h699h678h66dh8dh5c8h544h5b7h66dhc7h75hb5hb6h7bh8h5fh659h647h696h5dch5cbh66ch5c6h7ch67dh64fh6h65fh5b9h5b7h65fq,ValueFrom:nil,},EnvVar{Name:ovnnorthd-config,Value:n5c8h7ch56bh8dh8hc4h5dch9dh68h6bhb7h598h549h5dbh66fh6bh5b4h5cch5d6h55ch57fhfch588h89h5ddh5d6h65bh65bh8dhc4h67dh569q,ValueFrom:nil,},EnvVar{Name:ovnnorthd-scripts,Value:n664hd8h66ch58dh64hc9h66bhd4h558h697h67bh557hdch664h567h669h555h696h556h556h5fh5bh569hbh665h9dh4h9bh564hc8h5b7h5c4q,ValueFrom:nil,},EnvVar{Name:tls-ca-bundle.pem,Value:ncfh64h589h668h66fh595h68fh64fh5fdh564hb9h557h547h89h5dfh67h56ch574h66bhc9h56bhb8hd5h676h5fbh546h86h67bh5fch5hbbhd4q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-northd-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-northd-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-northd-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9qmfs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/status_check.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:1,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/status_check.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:1,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[
]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-northd-0_openstack(2330b767-24ec-4c55-9458-73ff85a96bc7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:28:28 crc kubenswrapper[4711]: I1205 12:28:28.900702 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:28 crc kubenswrapper[4711]: E1205 12:28:28.900990 4711 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:28:28 crc kubenswrapper[4711]: E1205 12:28:28.901033 4711 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:28:28 crc kubenswrapper[4711]: E1205 12:28:28.901122 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift podName:421d5855-985f-4d7f-9faf-c868088a7291 nodeName:}" failed. No retries permitted until 2025-12-05 12:28:44.901092553 +0000 UTC m=+1170.485414893 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift") pod "swift-storage-0" (UID: "421d5855-985f-4d7f-9faf-c868088a7291") : configmap "swift-ring-files" not found Dec 05 12:28:28 crc kubenswrapper[4711]: I1205 12:28:28.963470 4711 scope.go:117] "RemoveContainer" containerID="52657d1a02bc3c3053e27fd05c5b9870468c34b394063a1613e2ae0168ec39aa" Dec 05 12:28:29 crc kubenswrapper[4711]: I1205 12:28:29.003533 4711 scope.go:117] "RemoveContainer" containerID="472c49a169e1c65e73aac62e2dd2cc7781a63dc02785bf64789fa6376616fd5d" Dec 05 12:28:29 crc kubenswrapper[4711]: I1205 12:28:29.169315 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 12:28:29 crc kubenswrapper[4711]: I1205 12:28:29.169702 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 12:28:29 crc kubenswrapper[4711]: E1205 12:28:29.407338 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-northd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-northd-0" podUID="2330b767-24ec-4c55-9458-73ff85a96bc7" Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.382160 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" event={"ID":"c53fd197-4405-433e-ab39-a71e4248b5b1","Type":"ContainerStarted","Data":"a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd"} Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.382503 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.387209 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"39ed3b2d5509b3071d8cb2dc86698b520f2d0e9ed254bb880705f90278301c5c"} Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.390626 4711 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"feb938be-1a43-402f-8373-47a6c9217d9c","Type":"ContainerStarted","Data":"f6d0b82e84da48baa9dd0bf3bd7d7b67002ad8b89ca1dbae8399f61508e609b8"} Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.418547 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" event={"ID":"9db9755e-38c2-44d8-a728-424a4ff924a0","Type":"ContainerStarted","Data":"9f2edb50c520d49dffb48325213f290e472a0a63fad383e0c9a7ec20ddc5efba"} Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.418658 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.420095 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"2330b767-24ec-4c55-9458-73ff85a96bc7","Type":"ContainerStarted","Data":"5985fd817bcee813274d33bef17a3be4cf7861b90223fdfca87f5830f556256a"} Dec 05 12:28:30 crc kubenswrapper[4711]: E1205 12:28:30.422046 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-northd\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-ovn-northd:watcher_latest\\\"\"" pod="openstack/ovn-northd-0" podUID="2330b767-24ec-4c55-9458-73ff85a96bc7" Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.428533 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" podStartSLOduration=16.428510627 podStartE2EDuration="16.428510627s" podCreationTimestamp="2025-12-05 12:28:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:30.417924517 +0000 UTC m=+1156.002246857" watchObservedRunningTime="2025-12-05 12:28:30.428510627 +0000 UTC m=+1156.012832957" Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.435719 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerStarted","Data":"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996"} Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.518601 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371952.336193 podStartE2EDuration="1m24.518582535s" podCreationTimestamp="2025-12-05 12:27:06 +0000 UTC" firstStartedPulling="2025-12-05 12:27:08.562647981 +0000 UTC m=+1074.146970311" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:30.491568643 +0000 UTC m=+1156.075890973" watchObservedRunningTime="2025-12-05 12:28:30.518582535 +0000 UTC m=+1156.102904855" Dec 05 12:28:30 crc kubenswrapper[4711]: I1205 12:28:30.557129 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" podStartSLOduration=16.557109529 podStartE2EDuration="16.557109529s" podCreationTimestamp="2025-12-05 12:28:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:30.550827776 +0000 UTC m=+1156.135150106" watchObservedRunningTime="2025-12-05 12:28:30.557109529 +0000 UTC m=+1156.141431849" Dec 05 12:28:31 crc kubenswrapper[4711]: I1205 12:28:31.388979 4711 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 12:28:31 crc kubenswrapper[4711]: E1205 12:28:31.451307 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-northd\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-ovn-northd:watcher_latest\\\"\"" pod="openstack/ovn-northd-0" podUID="2330b767-24ec-4c55-9458-73ff85a96bc7" Dec 05 12:28:31 crc kubenswrapper[4711]: I1205 12:28:31.501902 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="6768dcf2-c875-4467-9da9-3857b2fdb2e3" containerName="galera" probeResult="failure" output=< Dec 05 12:28:31 crc kubenswrapper[4711]: wsrep_local_state_comment (Joined) differs from Synced Dec 05 12:28:31 crc kubenswrapper[4711]: > Dec 05 12:28:32 crc kubenswrapper[4711]: I1205 12:28:32.453626 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2jbtw" event={"ID":"64b58c93-4cf7-4623-ac23-b01d1fb62fa4","Type":"ContainerStarted","Data":"dbfc198e536173661da640f0911788d9e706f2909d242d9123704f4cb5a86a2d"} Dec 05 12:28:32 crc kubenswrapper[4711]: I1205 12:28:32.456534 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerStarted","Data":"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3"} Dec 05 12:28:34 crc kubenswrapper[4711]: I1205 12:28:34.957678 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:34 crc kubenswrapper[4711]: I1205 12:28:34.986684 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-2jbtw" podStartSLOduration=3.835482489 podStartE2EDuration="17.986663586s" podCreationTimestamp="2025-12-05 12:28:17 +0000 UTC" firstStartedPulling="2025-12-05 12:28:17.89558395 +0000 UTC m=+1143.479906280" lastFinishedPulling="2025-12-05 12:28:32.046765047 +0000 UTC m=+1157.631087377" observedRunningTime="2025-12-05 12:28:32.477564871 +0000 UTC m=+1158.061887201" watchObservedRunningTime="2025-12-05 12:28:34.986663586 +0000 UTC m=+1160.570985916" Dec 05 12:28:35 crc kubenswrapper[4711]: E1205 12:28:35.308861 4711 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.38:56106->38.129.56.38:38309: write tcp 38.129.56.38:56106->38.129.56.38:38309: write: connection reset by peer Dec 05 12:28:35 crc kubenswrapper[4711]: I1205 12:28:35.454246 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:28:35 crc kubenswrapper[4711]: I1205 12:28:35.514325 4711 generic.go:334] "Generic (PLEG): container finished" podID="c5ef8db4-5a97-483c-a168-bcf6368849a2" containerID="47f67c484cab171bc640a44e92ecd16dea282d80e2d48d886adc47eb22d914fe" exitCode=0 Dec 05 12:28:35 crc kubenswrapper[4711]: I1205 12:28:35.514439 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5ef8db4-5a97-483c-a168-bcf6368849a2","Type":"ContainerDied","Data":"47f67c484cab171bc640a44e92ecd16dea282d80e2d48d886adc47eb22d914fe"} Dec 05 12:28:35 crc kubenswrapper[4711]: I1205 12:28:35.521995 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c7c495775-4v5r4"] Dec 05 12:28:35 crc kubenswrapper[4711]: I1205 12:28:35.522229 4711 
Dec 05 12:28:35 crc kubenswrapper[4711]: I1205 12:28:35.522229 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" podUID="c53fd197-4405-433e-ab39-a71e4248b5b1" containerName="dnsmasq-dns" containerID="cri-o://a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd" gracePeriod=10
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.010964 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4"
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.149824 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-dns-svc\") pod \"c53fd197-4405-433e-ab39-a71e4248b5b1\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") "
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.150832 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh8s7\" (UniqueName: \"kubernetes.io/projected/c53fd197-4405-433e-ab39-a71e4248b5b1-kube-api-access-bh8s7\") pod \"c53fd197-4405-433e-ab39-a71e4248b5b1\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") "
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.150884 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-ovsdbserver-sb\") pod \"c53fd197-4405-433e-ab39-a71e4248b5b1\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") "
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.150928 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-config\") pod \"c53fd197-4405-433e-ab39-a71e4248b5b1\" (UID: \"c53fd197-4405-433e-ab39-a71e4248b5b1\") "
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.158119 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c53fd197-4405-433e-ab39-a71e4248b5b1-kube-api-access-bh8s7" (OuterVolumeSpecName: "kube-api-access-bh8s7") pod "c53fd197-4405-433e-ab39-a71e4248b5b1" (UID: "c53fd197-4405-433e-ab39-a71e4248b5b1"). InnerVolumeSpecName "kube-api-access-bh8s7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.208044 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c53fd197-4405-433e-ab39-a71e4248b5b1" (UID: "c53fd197-4405-433e-ab39-a71e4248b5b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.214636 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-config" (OuterVolumeSpecName: "config") pod "c53fd197-4405-433e-ab39-a71e4248b5b1" (UID: "c53fd197-4405-433e-ab39-a71e4248b5b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.218909 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c53fd197-4405-433e-ab39-a71e4248b5b1" (UID: "c53fd197-4405-433e-ab39-a71e4248b5b1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.253678 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh8s7\" (UniqueName: \"kubernetes.io/projected/c53fd197-4405-433e-ab39-a71e4248b5b1-kube-api-access-bh8s7\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.253737 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.253758 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.253772 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53fd197-4405-433e-ab39-a71e4248b5b1-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.524845 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5ef8db4-5a97-483c-a168-bcf6368849a2","Type":"ContainerStarted","Data":"53b8334dfadeb2202608549200bcd680b7af0b50d02278712b930c5b5275f6f9"}
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.525846 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.528018 4711 generic.go:334] "Generic (PLEG): container finished" podID="c53fd197-4405-433e-ab39-a71e4248b5b1" containerID="a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd" exitCode=0
Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.528049 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" event={"ID":"c53fd197-4405-433e-ab39-a71e4248b5b1","Type":"ContainerDied","Data":"a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd"}
Need to start a new one" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.528067 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7c495775-4v5r4" event={"ID":"c53fd197-4405-433e-ab39-a71e4248b5b1","Type":"ContainerDied","Data":"8a4b8cbc9a72e07fc45c94397310965d2b4116deb2d5a50682f6ed7e3a917689"} Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.528082 4711 scope.go:117] "RemoveContainer" containerID="a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd" Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.566509 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.634957399 podStartE2EDuration="1m32.566482164s" podCreationTimestamp="2025-12-05 12:27:04 +0000 UTC" firstStartedPulling="2025-12-05 12:27:06.384446094 +0000 UTC m=+1071.968768424" lastFinishedPulling="2025-12-05 12:28:01.315970859 +0000 UTC m=+1126.900293189" observedRunningTime="2025-12-05 12:28:36.561956673 +0000 UTC m=+1162.146279023" watchObservedRunningTime="2025-12-05 12:28:36.566482164 +0000 UTC m=+1162.150804504" Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.602049 4711 scope.go:117] "RemoveContainer" containerID="f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb" Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.606746 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c7c495775-4v5r4"] Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.618261 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c7c495775-4v5r4"] Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.639614 4711 scope.go:117] "RemoveContainer" containerID="a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd" Dec 05 12:28:36 crc kubenswrapper[4711]: E1205 12:28:36.640321 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd\": container with ID starting with a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd not found: ID does not exist" containerID="a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd" Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.640370 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd"} err="failed to get container status \"a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd\": rpc error: code = NotFound desc = could not find container \"a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd\": container with ID starting with a306a32b2c75df4099168dcd841de9f9a69cc8e8a890e4c1e5c89b67e2684cdd not found: ID does not exist" Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.640420 4711 scope.go:117] "RemoveContainer" containerID="f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb" Dec 05 12:28:36 crc kubenswrapper[4711]: E1205 12:28:36.640890 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb\": container with ID starting with f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb not found: ID does not exist" 
containerID="f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb" Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.640941 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb"} err="failed to get container status \"f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb\": rpc error: code = NotFound desc = could not find container \"f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb\": container with ID starting with f511ba73f60d58522caf644827abc07ea4a7c25dca3955a991d91b00e04565eb not found: ID does not exist" Dec 05 12:28:36 crc kubenswrapper[4711]: I1205 12:28:36.695480 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c53fd197-4405-433e-ab39-a71e4248b5b1" path="/var/lib/kubelet/pods/c53fd197-4405-433e-ab39-a71e4248b5b1/volumes" Dec 05 12:28:37 crc kubenswrapper[4711]: I1205 12:28:37.537792 4711 generic.go:334] "Generic (PLEG): container finished" podID="d1b49e15-30ab-4ef7-8980-436468104f7b" containerID="f1eee60c73d3476b4b977ec3bf5f33cedee51a31851ffdb613d2bff848f30625" exitCode=0 Dec 05 12:28:37 crc kubenswrapper[4711]: I1205 12:28:37.537877 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"d1b49e15-30ab-4ef7-8980-436468104f7b","Type":"ContainerDied","Data":"f1eee60c73d3476b4b977ec3bf5f33cedee51a31851ffdb613d2bff848f30625"} Dec 05 12:28:37 crc kubenswrapper[4711]: I1205 12:28:37.539859 4711 generic.go:334] "Generic (PLEG): container finished" podID="bff43887-0cb0-4da0-a16f-6264877c473e" containerID="b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec" exitCode=0 Dec 05 12:28:37 crc kubenswrapper[4711]: I1205 12:28:37.539949 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bff43887-0cb0-4da0-a16f-6264877c473e","Type":"ContainerDied","Data":"b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec"} Dec 05 12:28:37 crc kubenswrapper[4711]: I1205 12:28:37.664861 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 12:28:37 crc kubenswrapper[4711]: I1205 12:28:37.664908 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.056947 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.136163 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4wg9n" podUID="13a40f45-a612-477e-b883-94012252a457" containerName="ovn-controller" probeResult="failure" output=< Dec 05 12:28:39 crc kubenswrapper[4711]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 12:28:39 crc kubenswrapper[4711]: > Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.172774 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.177709 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-28s5p" Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.326029 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.435786 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4wg9n-config-6xs2c"]
Dec 05 12:28:39 crc kubenswrapper[4711]: E1205 12:28:39.436198 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3d58992-44b1-4f63-9574-795a557bcf96" containerName="dnsmasq-dns"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.436223 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3d58992-44b1-4f63-9574-795a557bcf96" containerName="dnsmasq-dns"
Dec 05 12:28:39 crc kubenswrapper[4711]: E1205 12:28:39.436244 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53fd197-4405-433e-ab39-a71e4248b5b1" containerName="init"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.436253 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53fd197-4405-433e-ab39-a71e4248b5b1" containerName="init"
Dec 05 12:28:39 crc kubenswrapper[4711]: E1205 12:28:39.436280 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3d58992-44b1-4f63-9574-795a557bcf96" containerName="init"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.436288 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3d58992-44b1-4f63-9574-795a557bcf96" containerName="init"
Dec 05 12:28:39 crc kubenswrapper[4711]: E1205 12:28:39.436302 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53fd197-4405-433e-ab39-a71e4248b5b1" containerName="dnsmasq-dns"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.436311 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53fd197-4405-433e-ab39-a71e4248b5b1" containerName="dnsmasq-dns"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.436945 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3d58992-44b1-4f63-9574-795a557bcf96" containerName="dnsmasq-dns"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.436999 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c53fd197-4405-433e-ab39-a71e4248b5b1" containerName="dnsmasq-dns"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.437762 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.440675 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.452252 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4wg9n-config-6xs2c"]
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.519342 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-scripts\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.519548 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.519587 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run-ovn\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.519627 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfvst\" (UniqueName: \"kubernetes.io/projected/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-kube-api-access-xfvst\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.519773 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-log-ovn\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.519846 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-additional-scripts\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.621211 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run-ovn\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.621324 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfvst\" (UniqueName: \"kubernetes.io/projected/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-kube-api-access-xfvst\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.621417 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-log-ovn\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.621444 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-additional-scripts\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.621496 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-scripts\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.621593 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run-ovn\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.621708 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.621757 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-log-ovn\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.622377 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.622715 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-additional-scripts\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c"
\"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-scripts\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c" Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.640575 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfvst\" (UniqueName: \"kubernetes.io/projected/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-kube-api-access-xfvst\") pod \"ovn-controller-4wg9n-config-6xs2c\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " pod="openstack/ovn-controller-4wg9n-config-6xs2c" Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.670597 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 05 12:28:39 crc kubenswrapper[4711]: I1205 12:28:39.760947 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4wg9n-config-6xs2c" Dec 05 12:28:40 crc kubenswrapper[4711]: I1205 12:28:40.564900 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bff43887-0cb0-4da0-a16f-6264877c473e","Type":"ContainerStarted","Data":"5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849"} Dec 05 12:28:40 crc kubenswrapper[4711]: I1205 12:28:40.565452 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:28:40 crc kubenswrapper[4711]: I1205 12:28:40.566974 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"d1b49e15-30ab-4ef7-8980-436468104f7b","Type":"ContainerStarted","Data":"0d2096f4ff267ae51f33237714be12feb5c114a1602da563a68a1397331ded80"} Dec 05 12:28:40 crc kubenswrapper[4711]: I1205 12:28:40.567358 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:28:40 crc kubenswrapper[4711]: I1205 12:28:40.589064 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.930829382 podStartE2EDuration="1m37.58904862s" podCreationTimestamp="2025-12-05 12:27:03 +0000 UTC" firstStartedPulling="2025-12-05 12:27:05.932316004 +0000 UTC m=+1071.516638334" lastFinishedPulling="2025-12-05 12:28:03.590535242 +0000 UTC m=+1129.174857572" observedRunningTime="2025-12-05 12:28:40.586685363 +0000 UTC m=+1166.171007713" watchObservedRunningTime="2025-12-05 12:28:40.58904862 +0000 UTC m=+1166.173370950" Dec 05 12:28:40 crc kubenswrapper[4711]: I1205 12:28:40.632109 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-notifications-server-0" podStartSLOduration=-9223371940.222692 podStartE2EDuration="1m36.632082795s" podCreationTimestamp="2025-12-05 12:27:04 +0000 UTC" firstStartedPulling="2025-12-05 12:27:07.002075495 +0000 UTC m=+1072.586397825" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:40.620617084 +0000 UTC m=+1166.204939434" watchObservedRunningTime="2025-12-05 12:28:40.632082795 +0000 UTC m=+1166.216405125" Dec 05 12:28:41 crc kubenswrapper[4711]: I1205 12:28:41.884398 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-b300-account-create-update-nd6hj"] Dec 05 12:28:41 crc kubenswrapper[4711]: I1205 12:28:41.885714 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:41 crc kubenswrapper[4711]: I1205 12:28:41.893404 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Dec 05 12:28:41 crc kubenswrapper[4711]: I1205 12:28:41.899798 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-b300-account-create-update-nd6hj"] Dec 05 12:28:41 crc kubenswrapper[4711]: I1205 12:28:41.930871 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-9d6hb"] Dec 05 12:28:41 crc kubenswrapper[4711]: I1205 12:28:41.932131 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:41 crc kubenswrapper[4711]: I1205 12:28:41.962123 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-9d6hb"] Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.066018 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v6bk\" (UniqueName: \"kubernetes.io/projected/697913df-02ee-441f-a751-ba53f26158ed-kube-api-access-4v6bk\") pod \"watcher-b300-account-create-update-nd6hj\" (UID: \"697913df-02ee-441f-a751-ba53f26158ed\") " pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.066872 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/697913df-02ee-441f-a751-ba53f26158ed-operator-scripts\") pod \"watcher-b300-account-create-update-nd6hj\" (UID: \"697913df-02ee-441f-a751-ba53f26158ed\") " pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.067068 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwj6m\" (UniqueName: \"kubernetes.io/projected/73d6ddd8-359e-4d13-884f-2d3808250318-kube-api-access-fwj6m\") pod \"watcher-db-create-9d6hb\" (UID: \"73d6ddd8-359e-4d13-884f-2d3808250318\") " pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.067233 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73d6ddd8-359e-4d13-884f-2d3808250318-operator-scripts\") pod \"watcher-db-create-9d6hb\" (UID: \"73d6ddd8-359e-4d13-884f-2d3808250318\") " pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.168572 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwj6m\" (UniqueName: \"kubernetes.io/projected/73d6ddd8-359e-4d13-884f-2d3808250318-kube-api-access-fwj6m\") pod \"watcher-db-create-9d6hb\" (UID: \"73d6ddd8-359e-4d13-884f-2d3808250318\") " pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.168955 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73d6ddd8-359e-4d13-884f-2d3808250318-operator-scripts\") pod \"watcher-db-create-9d6hb\" (UID: \"73d6ddd8-359e-4d13-884f-2d3808250318\") " pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.169099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-4v6bk\" (UniqueName: \"kubernetes.io/projected/697913df-02ee-441f-a751-ba53f26158ed-kube-api-access-4v6bk\") pod \"watcher-b300-account-create-update-nd6hj\" (UID: \"697913df-02ee-441f-a751-ba53f26158ed\") " pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.169342 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/697913df-02ee-441f-a751-ba53f26158ed-operator-scripts\") pod \"watcher-b300-account-create-update-nd6hj\" (UID: \"697913df-02ee-441f-a751-ba53f26158ed\") " pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.169585 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73d6ddd8-359e-4d13-884f-2d3808250318-operator-scripts\") pod \"watcher-db-create-9d6hb\" (UID: \"73d6ddd8-359e-4d13-884f-2d3808250318\") " pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.170003 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/697913df-02ee-441f-a751-ba53f26158ed-operator-scripts\") pod \"watcher-b300-account-create-update-nd6hj\" (UID: \"697913df-02ee-441f-a751-ba53f26158ed\") " pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.193998 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v6bk\" (UniqueName: \"kubernetes.io/projected/697913df-02ee-441f-a751-ba53f26158ed-kube-api-access-4v6bk\") pod \"watcher-b300-account-create-update-nd6hj\" (UID: \"697913df-02ee-441f-a751-ba53f26158ed\") " pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.194848 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwj6m\" (UniqueName: \"kubernetes.io/projected/73d6ddd8-359e-4d13-884f-2d3808250318-kube-api-access-fwj6m\") pod \"watcher-db-create-9d6hb\" (UID: \"73d6ddd8-359e-4d13-884f-2d3808250318\") " pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.206955 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.251639 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.743071 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4wg9n-config-6xs2c"] Dec 05 12:28:42 crc kubenswrapper[4711]: W1205 12:28:42.757024 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0c508c8_2ff0_4dc7_85d2_4e92742665e0.slice/crio-74648bc47abd7873cbb6de3fcbaf0ad5855410b2b9077ea768359bbb6af37c89 WatchSource:0}: Error finding container 74648bc47abd7873cbb6de3fcbaf0ad5855410b2b9077ea768359bbb6af37c89: Status 404 returned error can't find the container with id 74648bc47abd7873cbb6de3fcbaf0ad5855410b2b9077ea768359bbb6af37c89 Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.841191 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-b300-account-create-update-nd6hj"] Dec 05 12:28:42 crc kubenswrapper[4711]: W1205 12:28:42.851198 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod697913df_02ee_441f_a751_ba53f26158ed.slice/crio-e0d059b287b8444eeca9dce90e2260b2d00e1cd355d174d557a0a24221a929d5 WatchSource:0}: Error finding container e0d059b287b8444eeca9dce90e2260b2d00e1cd355d174d557a0a24221a929d5: Status 404 returned error can't find the container with id e0d059b287b8444eeca9dce90e2260b2d00e1cd355d174d557a0a24221a929d5 Dec 05 12:28:42 crc kubenswrapper[4711]: I1205 12:28:42.852973 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-9d6hb"] Dec 05 12:28:42 crc kubenswrapper[4711]: W1205 12:28:42.854521 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73d6ddd8_359e_4d13_884f_2d3808250318.slice/crio-db272dc09980bf1fbb9c257c532532623af67d1528c64ec8d46a48b27ab38f74 WatchSource:0}: Error finding container db272dc09980bf1fbb9c257c532532623af67d1528c64ec8d46a48b27ab38f74: Status 404 returned error can't find the container with id db272dc09980bf1fbb9c257c532532623af67d1528c64ec8d46a48b27ab38f74 Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.610303 4711 generic.go:334] "Generic (PLEG): container finished" podID="697913df-02ee-441f-a751-ba53f26158ed" containerID="0d706ca798f565ae9cedbe11bce3e3e7672922350d7b6247c43afa4dc596afe9" exitCode=0 Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.610421 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-b300-account-create-update-nd6hj" event={"ID":"697913df-02ee-441f-a751-ba53f26158ed","Type":"ContainerDied","Data":"0d706ca798f565ae9cedbe11bce3e3e7672922350d7b6247c43afa4dc596afe9"} Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.610733 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-b300-account-create-update-nd6hj" event={"ID":"697913df-02ee-441f-a751-ba53f26158ed","Type":"ContainerStarted","Data":"e0d059b287b8444eeca9dce90e2260b2d00e1cd355d174d557a0a24221a929d5"} Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.613223 4711 generic.go:334] "Generic (PLEG): container finished" podID="e0c508c8-2ff0-4dc7-85d2-4e92742665e0" containerID="ab32dc5d8a5a2e81d3e113bfab5b59ffce9256d55aeb06cadaac5fddb318ab74" exitCode=0 Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.613246 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4wg9n-config-6xs2c" 
event={"ID":"e0c508c8-2ff0-4dc7-85d2-4e92742665e0","Type":"ContainerDied","Data":"ab32dc5d8a5a2e81d3e113bfab5b59ffce9256d55aeb06cadaac5fddb318ab74"} Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.613284 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4wg9n-config-6xs2c" event={"ID":"e0c508c8-2ff0-4dc7-85d2-4e92742665e0","Type":"ContainerStarted","Data":"74648bc47abd7873cbb6de3fcbaf0ad5855410b2b9077ea768359bbb6af37c89"} Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.616879 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerStarted","Data":"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f"} Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.619253 4711 generic.go:334] "Generic (PLEG): container finished" podID="73d6ddd8-359e-4d13-884f-2d3808250318" containerID="fdb629b0b1a9720b51a6e90599267a71c0f7e8952c59a880bbd76034c3352032" exitCode=0 Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.619285 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-9d6hb" event={"ID":"73d6ddd8-359e-4d13-884f-2d3808250318","Type":"ContainerDied","Data":"fdb629b0b1a9720b51a6e90599267a71c0f7e8952c59a880bbd76034c3352032"} Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.619319 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-9d6hb" event={"ID":"73d6ddd8-359e-4d13-884f-2d3808250318","Type":"ContainerStarted","Data":"db272dc09980bf1fbb9c257c532532623af67d1528c64ec8d46a48b27ab38f74"} Dec 05 12:28:43 crc kubenswrapper[4711]: I1205 12:28:43.668030 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=29.259750555 podStartE2EDuration="1m32.668010808s" podCreationTimestamp="2025-12-05 12:27:11 +0000 UTC" firstStartedPulling="2025-12-05 12:27:38.834715573 +0000 UTC m=+1104.419037903" lastFinishedPulling="2025-12-05 12:28:42.242975826 +0000 UTC m=+1167.827298156" observedRunningTime="2025-12-05 12:28:43.660827962 +0000 UTC m=+1169.245150292" watchObservedRunningTime="2025-12-05 12:28:43.668010808 +0000 UTC m=+1169.252333138" Dec 05 12:28:44 crc kubenswrapper[4711]: I1205 12:28:44.142649 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4wg9n" podUID="13a40f45-a612-477e-b883-94012252a457" containerName="ovn-controller" probeResult="failure" output=< Dec 05 12:28:44 crc kubenswrapper[4711]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 12:28:44 crc kubenswrapper[4711]: > Dec 05 12:28:44 crc kubenswrapper[4711]: I1205 12:28:44.632388 4711 generic.go:334] "Generic (PLEG): container finished" podID="64b58c93-4cf7-4623-ac23-b01d1fb62fa4" containerID="dbfc198e536173661da640f0911788d9e706f2909d242d9123704f4cb5a86a2d" exitCode=0 Dec 05 12:28:44 crc kubenswrapper[4711]: I1205 12:28:44.632443 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2jbtw" event={"ID":"64b58c93-4cf7-4623-ac23-b01d1fb62fa4","Type":"ContainerDied","Data":"dbfc198e536173661da640f0911788d9e706f2909d242d9123704f4cb5a86a2d"} Dec 05 12:28:44 crc kubenswrapper[4711]: I1205 12:28:44.926637 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod 
\"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:44 crc kubenswrapper[4711]: I1205 12:28:44.932860 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/421d5855-985f-4d7f-9faf-c868088a7291-etc-swift\") pod \"swift-storage-0\" (UID: \"421d5855-985f-4d7f-9faf-c868088a7291\") " pod="openstack/swift-storage-0" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.014522 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.052463 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4wg9n-config-6xs2c" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.132141 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwj6m\" (UniqueName: \"kubernetes.io/projected/73d6ddd8-359e-4d13-884f-2d3808250318-kube-api-access-fwj6m\") pod \"73d6ddd8-359e-4d13-884f-2d3808250318\" (UID: \"73d6ddd8-359e-4d13-884f-2d3808250318\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.132381 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73d6ddd8-359e-4d13-884f-2d3808250318-operator-scripts\") pod \"73d6ddd8-359e-4d13-884f-2d3808250318\" (UID: \"73d6ddd8-359e-4d13-884f-2d3808250318\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.134457 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73d6ddd8-359e-4d13-884f-2d3808250318-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "73d6ddd8-359e-4d13-884f-2d3808250318" (UID: "73d6ddd8-359e-4d13-884f-2d3808250318"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.143647 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73d6ddd8-359e-4d13-884f-2d3808250318-kube-api-access-fwj6m" (OuterVolumeSpecName: "kube-api-access-fwj6m") pod "73d6ddd8-359e-4d13-884f-2d3808250318" (UID: "73d6ddd8-359e-4d13-884f-2d3808250318"). InnerVolumeSpecName "kube-api-access-fwj6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.164160 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.235956 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run-ovn\") pod \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.236106 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run\") pod \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.236108 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "e0c508c8-2ff0-4dc7-85d2-4e92742665e0" (UID: "e0c508c8-2ff0-4dc7-85d2-4e92742665e0"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.236199 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfvst\" (UniqueName: \"kubernetes.io/projected/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-kube-api-access-xfvst\") pod \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.236242 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-scripts\") pod \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.236204 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run" (OuterVolumeSpecName: "var-run") pod "e0c508c8-2ff0-4dc7-85d2-4e92742665e0" (UID: "e0c508c8-2ff0-4dc7-85d2-4e92742665e0"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.236272 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-log-ovn\") pod \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.236319 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "e0c508c8-2ff0-4dc7-85d2-4e92742665e0" (UID: "e0c508c8-2ff0-4dc7-85d2-4e92742665e0"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.236459 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-additional-scripts\") pod \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\" (UID: \"e0c508c8-2ff0-4dc7-85d2-4e92742665e0\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.237185 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/73d6ddd8-359e-4d13-884f-2d3808250318-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.237213 4711 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.237228 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwj6m\" (UniqueName: \"kubernetes.io/projected/73d6ddd8-359e-4d13-884f-2d3808250318-kube-api-access-fwj6m\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.237243 4711 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.237255 4711 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.237358 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "e0c508c8-2ff0-4dc7-85d2-4e92742665e0" (UID: "e0c508c8-2ff0-4dc7-85d2-4e92742665e0"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.237598 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-scripts" (OuterVolumeSpecName: "scripts") pod "e0c508c8-2ff0-4dc7-85d2-4e92742665e0" (UID: "e0c508c8-2ff0-4dc7-85d2-4e92742665e0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.248616 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-kube-api-access-xfvst" (OuterVolumeSpecName: "kube-api-access-xfvst") pod "e0c508c8-2ff0-4dc7-85d2-4e92742665e0" (UID: "e0c508c8-2ff0-4dc7-85d2-4e92742665e0"). InnerVolumeSpecName "kube-api-access-xfvst". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.278334 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.339247 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfvst\" (UniqueName: \"kubernetes.io/projected/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-kube-api-access-xfvst\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.339290 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.339305 4711 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c508c8-2ff0-4dc7-85d2-4e92742665e0-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.440520 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/697913df-02ee-441f-a751-ba53f26158ed-operator-scripts\") pod \"697913df-02ee-441f-a751-ba53f26158ed\" (UID: \"697913df-02ee-441f-a751-ba53f26158ed\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.440632 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v6bk\" (UniqueName: \"kubernetes.io/projected/697913df-02ee-441f-a751-ba53f26158ed-kube-api-access-4v6bk\") pod \"697913df-02ee-441f-a751-ba53f26158ed\" (UID: \"697913df-02ee-441f-a751-ba53f26158ed\") " Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.441375 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/697913df-02ee-441f-a751-ba53f26158ed-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "697913df-02ee-441f-a751-ba53f26158ed" (UID: "697913df-02ee-441f-a751-ba53f26158ed"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.441717 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/697913df-02ee-441f-a751-ba53f26158ed-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.445830 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/697913df-02ee-441f-a751-ba53f26158ed-kube-api-access-4v6bk" (OuterVolumeSpecName: "kube-api-access-4v6bk") pod "697913df-02ee-441f-a751-ba53f26158ed" (UID: "697913df-02ee-441f-a751-ba53f26158ed"). InnerVolumeSpecName "kube-api-access-4v6bk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.543474 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v6bk\" (UniqueName: \"kubernetes.io/projected/697913df-02ee-441f-a751-ba53f26158ed-kube-api-access-4v6bk\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.646744 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"2330b767-24ec-4c55-9458-73ff85a96bc7","Type":"ContainerStarted","Data":"02ecc6004e2b88fb2fdfc6458e65f9b9cfbf0afa56cc7509224cb7efc8c00274"} Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.647938 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.650072 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-b300-account-create-update-nd6hj" event={"ID":"697913df-02ee-441f-a751-ba53f26158ed","Type":"ContainerDied","Data":"e0d059b287b8444eeca9dce90e2260b2d00e1cd355d174d557a0a24221a929d5"} Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.650096 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0d059b287b8444eeca9dce90e2260b2d00e1cd355d174d557a0a24221a929d5" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.650136 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-b300-account-create-update-nd6hj" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.656232 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4wg9n-config-6xs2c" event={"ID":"e0c508c8-2ff0-4dc7-85d2-4e92742665e0","Type":"ContainerDied","Data":"74648bc47abd7873cbb6de3fcbaf0ad5855410b2b9077ea768359bbb6af37c89"} Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.656255 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4wg9n-config-6xs2c" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.656271 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74648bc47abd7873cbb6de3fcbaf0ad5855410b2b9077ea768359bbb6af37c89" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.666308 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-9d6hb" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.666396 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-9d6hb" event={"ID":"73d6ddd8-359e-4d13-884f-2d3808250318","Type":"ContainerDied","Data":"db272dc09980bf1fbb9c257c532532623af67d1528c64ec8d46a48b27ab38f74"} Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.666485 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db272dc09980bf1fbb9c257c532532623af67d1528c64ec8d46a48b27ab38f74" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.685101 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.353836226 podStartE2EDuration="30.685068678s" podCreationTimestamp="2025-12-05 12:28:15 +0000 UTC" firstStartedPulling="2025-12-05 12:28:16.455724404 +0000 UTC m=+1142.040046734" lastFinishedPulling="2025-12-05 12:28:44.786956856 +0000 UTC m=+1170.371279186" observedRunningTime="2025-12-05 12:28:45.675929884 +0000 UTC m=+1171.260252234" watchObservedRunningTime="2025-12-05 12:28:45.685068678 +0000 UTC m=+1171.269390998" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.744443 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: connect: connection refused" Dec 05 12:28:45 crc kubenswrapper[4711]: I1205 12:28:45.976145 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 12:28:45 crc kubenswrapper[4711]: W1205 12:28:45.979642 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod421d5855_985f_4d7f_9faf_c868088a7291.slice/crio-7d91f31289236d693652437d000a5d67da8cd69239412930721fdff47c3d61da WatchSource:0}: Error finding container 7d91f31289236d693652437d000a5d67da8cd69239412930721fdff47c3d61da: Status 404 returned error can't find the container with id 7d91f31289236d693652437d000a5d67da8cd69239412930721fdff47c3d61da Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.037005 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-2jbtw"
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.153638 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-swiftconf\") pod \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") "
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.153692 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-combined-ca-bundle\") pod \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") "
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.153758 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-ring-data-devices\") pod \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") "
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.153804 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdpvf\" (UniqueName: \"kubernetes.io/projected/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-kube-api-access-zdpvf\") pod \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") "
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.153902 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-etc-swift\") pod \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") "
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.154309 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-dispersionconf\") pod \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") "
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.154338 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "64b58c93-4cf7-4623-ac23-b01d1fb62fa4" (UID: "64b58c93-4cf7-4623-ac23-b01d1fb62fa4"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.154381 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-scripts\") pod \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\" (UID: \"64b58c93-4cf7-4623-ac23-b01d1fb62fa4\") "
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.154901 4711 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.154999 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "64b58c93-4cf7-4623-ac23-b01d1fb62fa4" (UID: "64b58c93-4cf7-4623-ac23-b01d1fb62fa4"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.159659 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-kube-api-access-zdpvf" (OuterVolumeSpecName: "kube-api-access-zdpvf") pod "64b58c93-4cf7-4623-ac23-b01d1fb62fa4" (UID: "64b58c93-4cf7-4623-ac23-b01d1fb62fa4"). InnerVolumeSpecName "kube-api-access-zdpvf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.165543 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "64b58c93-4cf7-4623-ac23-b01d1fb62fa4" (UID: "64b58c93-4cf7-4623-ac23-b01d1fb62fa4"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.182558 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "64b58c93-4cf7-4623-ac23-b01d1fb62fa4" (UID: "64b58c93-4cf7-4623-ac23-b01d1fb62fa4"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.186347 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4wg9n-config-6xs2c"]
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.192898 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4wg9n-config-6xs2c"]
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.199355 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-scripts" (OuterVolumeSpecName: "scripts") pod "64b58c93-4cf7-4623-ac23-b01d1fb62fa4" (UID: "64b58c93-4cf7-4623-ac23-b01d1fb62fa4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.207732 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64b58c93-4cf7-4623-ac23-b01d1fb62fa4" (UID: "64b58c93-4cf7-4623-ac23-b01d1fb62fa4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.256454 4711 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.256500 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.256514 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdpvf\" (UniqueName: \"kubernetes.io/projected/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-kube-api-access-zdpvf\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.256526 4711 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.256538 4711 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.256548 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64b58c93-4cf7-4623-ac23-b01d1fb62fa4-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.680036 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2jbtw" event={"ID":"64b58c93-4cf7-4623-ac23-b01d1fb62fa4","Type":"ContainerDied","Data":"40afcb441ec983a6cf079186dc08418e5a33616cdca7f0135bf44082b721f8ab"}
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.680104 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40afcb441ec983a6cf079186dc08418e5a33616cdca7f0135bf44082b721f8ab"
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.680201 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2jbtw"
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.702831 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0c508c8-2ff0-4dc7-85d2-4e92742665e0" path="/var/lib/kubelet/pods/e0c508c8-2ff0-4dc7-85d2-4e92742665e0/volumes"
Dec 05 12:28:46 crc kubenswrapper[4711]: I1205 12:28:46.704091 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"7d91f31289236d693652437d000a5d67da8cd69239412930721fdff47c3d61da"}
Dec 05 12:28:47 crc kubenswrapper[4711]: I1205 12:28:47.695586 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"60a120b328ab0ec6853637d3f27fe942aa3cd35590289bf561f9711ed037200f"}
Dec 05 12:28:47 crc kubenswrapper[4711]: I1205 12:28:47.695922 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"3200b5d026e6e8797a3d645870f4d9b510c71a00f3359591c2d7c19d1ca2cc1a"}
Dec 05 12:28:47 crc kubenswrapper[4711]: I1205 12:28:47.695937 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"a19d6628685db1fa062ad55d698d86d496ddfa83d26111e7dbe7be9a60cc173a"}
Dec 05 12:28:48 crc kubenswrapper[4711]: I1205 12:28:48.114255 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.075753 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-z6v79"]
Dec 05 12:28:49 crc kubenswrapper[4711]: E1205 12:28:49.076437 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73d6ddd8-359e-4d13-884f-2d3808250318" containerName="mariadb-database-create"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.076449 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="73d6ddd8-359e-4d13-884f-2d3808250318" containerName="mariadb-database-create"
Dec 05 12:28:49 crc kubenswrapper[4711]: E1205 12:28:49.076464 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b58c93-4cf7-4623-ac23-b01d1fb62fa4" containerName="swift-ring-rebalance"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.076470 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b58c93-4cf7-4623-ac23-b01d1fb62fa4" containerName="swift-ring-rebalance"
Dec 05 12:28:49 crc kubenswrapper[4711]: E1205 12:28:49.076490 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0c508c8-2ff0-4dc7-85d2-4e92742665e0" containerName="ovn-config"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.076497 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0c508c8-2ff0-4dc7-85d2-4e92742665e0" containerName="ovn-config"
Dec 05 12:28:49 crc kubenswrapper[4711]: E1205 12:28:49.076507 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="697913df-02ee-441f-a751-ba53f26158ed" containerName="mariadb-account-create-update"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.076513 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="697913df-02ee-441f-a751-ba53f26158ed" containerName="mariadb-account-create-update"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.076688 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b58c93-4cf7-4623-ac23-b01d1fb62fa4" containerName="swift-ring-rebalance"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.076716 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0c508c8-2ff0-4dc7-85d2-4e92742665e0" containerName="ovn-config"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.076725 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="697913df-02ee-441f-a751-ba53f26158ed" containerName="mariadb-account-create-update"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.076738 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="73d6ddd8-359e-4d13-884f-2d3808250318" containerName="mariadb-database-create"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.077264 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.095581 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-z6v79"]
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.178307 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-4wg9n"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.213481 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b66d472-e836-4788-a4f5-2cfc3b269e24-operator-scripts\") pod \"keystone-db-create-z6v79\" (UID: \"4b66d472-e836-4788-a4f5-2cfc3b269e24\") " pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.213698 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgtrm\" (UniqueName: \"kubernetes.io/projected/4b66d472-e836-4788-a4f5-2cfc3b269e24-kube-api-access-qgtrm\") pod \"keystone-db-create-z6v79\" (UID: \"4b66d472-e836-4788-a4f5-2cfc3b269e24\") " pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.214954 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-31c6-account-create-update-988z6"]
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.217286 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.223177 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.244600 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-31c6-account-create-update-988z6"]
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.324063 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-operator-scripts\") pod \"keystone-31c6-account-create-update-988z6\" (UID: \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\") " pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.324171 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7h6l\" (UniqueName: \"kubernetes.io/projected/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-kube-api-access-f7h6l\") pod \"keystone-31c6-account-create-update-988z6\" (UID: \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\") " pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.324595 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgtrm\" (UniqueName: \"kubernetes.io/projected/4b66d472-e836-4788-a4f5-2cfc3b269e24-kube-api-access-qgtrm\") pod \"keystone-db-create-z6v79\" (UID: \"4b66d472-e836-4788-a4f5-2cfc3b269e24\") " pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.324671 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b66d472-e836-4788-a4f5-2cfc3b269e24-operator-scripts\") pod \"keystone-db-create-z6v79\" (UID: \"4b66d472-e836-4788-a4f5-2cfc3b269e24\") " pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.327626 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b66d472-e836-4788-a4f5-2cfc3b269e24-operator-scripts\") pod \"keystone-db-create-z6v79\" (UID: \"4b66d472-e836-4788-a4f5-2cfc3b269e24\") " pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.367235 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgtrm\" (UniqueName: \"kubernetes.io/projected/4b66d472-e836-4788-a4f5-2cfc3b269e24-kube-api-access-qgtrm\") pod \"keystone-db-create-z6v79\" (UID: \"4b66d472-e836-4788-a4f5-2cfc3b269e24\") " pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.403770 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-rqxct"]
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.405213 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.417145 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.423918 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-rqxct"]
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.427293 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-operator-scripts\") pod \"keystone-31c6-account-create-update-988z6\" (UID: \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\") " pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.427374 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7h6l\" (UniqueName: \"kubernetes.io/projected/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-kube-api-access-f7h6l\") pod \"keystone-31c6-account-create-update-988z6\" (UID: \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\") " pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.427456 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvbdx\" (UniqueName: \"kubernetes.io/projected/107c5bb6-4c54-4e09-9dc5-2f777321d66c-kube-api-access-bvbdx\") pod \"placement-db-create-rqxct\" (UID: \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\") " pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.427564 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/107c5bb6-4c54-4e09-9dc5-2f777321d66c-operator-scripts\") pod \"placement-db-create-rqxct\" (UID: \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\") " pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.429293 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-operator-scripts\") pod \"keystone-31c6-account-create-update-988z6\" (UID: \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\") " pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.453619 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7h6l\" (UniqueName: \"kubernetes.io/projected/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-kube-api-access-f7h6l\") pod \"keystone-31c6-account-create-update-988z6\" (UID: \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\") " pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.528720 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/107c5bb6-4c54-4e09-9dc5-2f777321d66c-operator-scripts\") pod \"placement-db-create-rqxct\" (UID: \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\") " pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.528861 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvbdx\" (UniqueName: \"kubernetes.io/projected/107c5bb6-4c54-4e09-9dc5-2f777321d66c-kube-api-access-bvbdx\") pod \"placement-db-create-rqxct\" (UID: \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\") " pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.530011 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/107c5bb6-4c54-4e09-9dc5-2f777321d66c-operator-scripts\") pod \"placement-db-create-rqxct\" (UID: \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\") " pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.543673 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8d9f-account-create-update-jcgcd"]
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.544091 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.546713 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.551025 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.555790 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8d9f-account-create-update-jcgcd"]
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.567179 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvbdx\" (UniqueName: \"kubernetes.io/projected/107c5bb6-4c54-4e09-9dc5-2f777321d66c-kube-api-access-bvbdx\") pod \"placement-db-create-rqxct\" (UID: \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\") " pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.630950 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66eadbd3-94ed-46b4-abb4-7a9ead434641-operator-scripts\") pod \"placement-8d9f-account-create-update-jcgcd\" (UID: \"66eadbd3-94ed-46b4-abb4-7a9ead434641\") " pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.631037 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqs6z\" (UniqueName: \"kubernetes.io/projected/66eadbd3-94ed-46b4-abb4-7a9ead434641-kube-api-access-jqs6z\") pod \"placement-8d9f-account-create-update-jcgcd\" (UID: \"66eadbd3-94ed-46b4-abb4-7a9ead434641\") " pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.725792 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"dbd9cacc7688db69fe70c15c8b74f75020be2660791d41ecfc50e385d1b3fe42"}
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.735623 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66eadbd3-94ed-46b4-abb4-7a9ead434641-operator-scripts\") pod \"placement-8d9f-account-create-update-jcgcd\" (UID: \"66eadbd3-94ed-46b4-abb4-7a9ead434641\") " pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.735713 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqs6z\" (UniqueName: \"kubernetes.io/projected/66eadbd3-94ed-46b4-abb4-7a9ead434641-kube-api-access-jqs6z\") pod \"placement-8d9f-account-create-update-jcgcd\" (UID: \"66eadbd3-94ed-46b4-abb4-7a9ead434641\") " pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.737248 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66eadbd3-94ed-46b4-abb4-7a9ead434641-operator-scripts\") pod \"placement-8d9f-account-create-update-jcgcd\" (UID: \"66eadbd3-94ed-46b4-abb4-7a9ead434641\") " pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.748066 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.764618 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqs6z\" (UniqueName: \"kubernetes.io/projected/66eadbd3-94ed-46b4-abb4-7a9ead434641-kube-api-access-jqs6z\") pod \"placement-8d9f-account-create-update-jcgcd\" (UID: \"66eadbd3-94ed-46b4-abb4-7a9ead434641\") " pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:49 crc kubenswrapper[4711]: I1205 12:28:49.867357 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.070642 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-31c6-account-create-update-988z6"]
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.188364 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-z6v79"]
Dec 05 12:28:50 crc kubenswrapper[4711]: W1205 12:28:50.211442 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b66d472_e836_4788_a4f5_2cfc3b269e24.slice/crio-a043e25e33b23d649e7d9aec79bb84f801a5e3567dc4c5cdac93454bd13a857c WatchSource:0}: Error finding container a043e25e33b23d649e7d9aec79bb84f801a5e3567dc4c5cdac93454bd13a857c: Status 404 returned error can't find the container with id a043e25e33b23d649e7d9aec79bb84f801a5e3567dc4c5cdac93454bd13a857c
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.358831 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-rqxct"]
Dec 05 12:28:50 crc kubenswrapper[4711]: W1205 12:28:50.368123 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod107c5bb6_4c54_4e09_9dc5_2f777321d66c.slice/crio-f73229be135cc54841485df83ecbb76db94c0084bbbdb87c890cef5ff0265bd5 WatchSource:0}: Error finding container f73229be135cc54841485df83ecbb76db94c0084bbbdb87c890cef5ff0265bd5: Status 404 returned error can't find the container with id f73229be135cc54841485df83ecbb76db94c0084bbbdb87c890cef5ff0265bd5
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.540907 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8d9f-account-create-update-jcgcd"]
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.744036 4711 generic.go:334] "Generic (PLEG): container finished" podID="2e40033e-bd5c-45ee-b9d9-5c02304fd76d" containerID="241c5ba0f8b47b1f2281e6d3d659be3e8b8b75f6c1b56adedc7d73fcd728d81f" exitCode=0
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.744164 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-31c6-account-create-update-988z6" event={"ID":"2e40033e-bd5c-45ee-b9d9-5c02304fd76d","Type":"ContainerDied","Data":"241c5ba0f8b47b1f2281e6d3d659be3e8b8b75f6c1b56adedc7d73fcd728d81f"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.744214 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-31c6-account-create-update-988z6" event={"ID":"2e40033e-bd5c-45ee-b9d9-5c02304fd76d","Type":"ContainerStarted","Data":"52c28c0aece164b3b8ef9503161077eb5ba0b4e4f66b07258e1dd1386fb0e03b"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.750015 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rqxct" event={"ID":"107c5bb6-4c54-4e09-9dc5-2f777321d66c","Type":"ContainerStarted","Data":"f73229be135cc54841485df83ecbb76db94c0084bbbdb87c890cef5ff0265bd5"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.761627 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"ffcabb68470a4193e45903f9ddd2f8f772667f2f17d94d8cf9148050337974d1"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.761705 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"3296159ef73662148ad58770f2644d3b5d2634a35c0f68d89cbec2121d901799"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.761721 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"34aa0ecd3d63c41b9f9266ebe5ba0dd1ec36336f750f8b4773bdd88290733b60"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.761732 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"2d39b794f2fbe51ed85c29de1bb7daf5a8bb817b2acfdba57a668aeb5fd84003"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.763802 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-z6v79" event={"ID":"4b66d472-e836-4788-a4f5-2cfc3b269e24","Type":"ContainerStarted","Data":"b691811c358b80e304d8a1700b70ffdef4f3322967b67174551d323bf4e0449d"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.763835 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-z6v79" event={"ID":"4b66d472-e836-4788-a4f5-2cfc3b269e24","Type":"ContainerStarted","Data":"a043e25e33b23d649e7d9aec79bb84f801a5e3567dc4c5cdac93454bd13a857c"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.765867 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9f-account-create-update-jcgcd" event={"ID":"66eadbd3-94ed-46b4-abb4-7a9ead434641","Type":"ContainerStarted","Data":"a691ff5bec1ea5626f25e5039b6d13164e2335ad38eb599e61168b25e96b53ce"}
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.818885 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-z6v79" podStartSLOduration=1.818861042 podStartE2EDuration="1.818861042s" podCreationTimestamp="2025-12-05 12:28:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:50.788121528 +0000 UTC m=+1176.372443868" watchObservedRunningTime="2025-12-05 12:28:50.818861042 +0000 UTC m=+1176.403183372"
Dec 05 12:28:50 crc kubenswrapper[4711]: I1205 12:28:50.834849 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-rqxct" podStartSLOduration=1.8348253730000001 podStartE2EDuration="1.834825373s" podCreationTimestamp="2025-12-05 12:28:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:50.834143216 +0000 UTC m=+1176.418465546" watchObservedRunningTime="2025-12-05 12:28:50.834825373 +0000 UTC m=+1176.419147703"
Dec 05 12:28:51 crc kubenswrapper[4711]: I1205 12:28:51.776111 4711 generic.go:334] "Generic (PLEG): container finished" podID="107c5bb6-4c54-4e09-9dc5-2f777321d66c" containerID="c1fe3f2c2dfa3c546d4a567302f6a525c40282af87881b216b196a142e3262d8" exitCode=0
Dec 05 12:28:51 crc kubenswrapper[4711]: I1205 12:28:51.776175 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rqxct" event={"ID":"107c5bb6-4c54-4e09-9dc5-2f777321d66c","Type":"ContainerDied","Data":"c1fe3f2c2dfa3c546d4a567302f6a525c40282af87881b216b196a142e3262d8"}
Dec 05 12:28:51 crc kubenswrapper[4711]: I1205 12:28:51.778833 4711 generic.go:334] "Generic (PLEG): container finished" podID="4b66d472-e836-4788-a4f5-2cfc3b269e24" containerID="b691811c358b80e304d8a1700b70ffdef4f3322967b67174551d323bf4e0449d" exitCode=0
Dec 05 12:28:51 crc kubenswrapper[4711]: I1205 12:28:51.778924 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-z6v79" event={"ID":"4b66d472-e836-4788-a4f5-2cfc3b269e24","Type":"ContainerDied","Data":"b691811c358b80e304d8a1700b70ffdef4f3322967b67174551d323bf4e0449d"}
Dec 05 12:28:51 crc kubenswrapper[4711]: I1205 12:28:51.780550 4711 generic.go:334] "Generic (PLEG): container finished" podID="66eadbd3-94ed-46b4-abb4-7a9ead434641" containerID="e266897b512b90204c11e8c27b312996941c58fce56c515d9b381875f129997b" exitCode=0
Dec 05 12:28:51 crc kubenswrapper[4711]: I1205 12:28:51.780618 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9f-account-create-update-jcgcd" event={"ID":"66eadbd3-94ed-46b4-abb4-7a9ead434641","Type":"ContainerDied","Data":"e266897b512b90204c11e8c27b312996941c58fce56c515d9b381875f129997b"}
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.181987 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.302451 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7h6l\" (UniqueName: \"kubernetes.io/projected/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-kube-api-access-f7h6l\") pod \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\" (UID: \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\") "
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.302563 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-operator-scripts\") pod \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\" (UID: \"2e40033e-bd5c-45ee-b9d9-5c02304fd76d\") "
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.306869 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2e40033e-bd5c-45ee-b9d9-5c02304fd76d" (UID: "2e40033e-bd5c-45ee-b9d9-5c02304fd76d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.310168 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-kube-api-access-f7h6l" (OuterVolumeSpecName: "kube-api-access-f7h6l") pod "2e40033e-bd5c-45ee-b9d9-5c02304fd76d" (UID: "2e40033e-bd5c-45ee-b9d9-5c02304fd76d"). InnerVolumeSpecName "kube-api-access-f7h6l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.405479 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7h6l\" (UniqueName: \"kubernetes.io/projected/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-kube-api-access-f7h6l\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.405950 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e40033e-bd5c-45ee-b9d9-5c02304fd76d-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.794444 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-31c6-account-create-update-988z6" event={"ID":"2e40033e-bd5c-45ee-b9d9-5c02304fd76d","Type":"ContainerDied","Data":"52c28c0aece164b3b8ef9503161077eb5ba0b4e4f66b07258e1dd1386fb0e03b"}
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.794800 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52c28c0aece164b3b8ef9503161077eb5ba0b4e4f66b07258e1dd1386fb0e03b"
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.794492 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-31c6-account-create-update-988z6"
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.800597 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"a35fe61e91f96f3a9fb287c7545f54973eefdb2280a91707f1a96825c9cbca50"}
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.800648 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"f9c2fff95a5fb312f9904af677082a54c07f89dab06840678fc3f1e88a12f22d"}
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.800661 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"f408fb92a9b8e3e3bf1427e24ecf3108ed72d688d1533f4bb4da8ea4c8803003"}
Dec 05 12:28:52 crc kubenswrapper[4711]: I1205 12:28:52.800673 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"09e2d18fa74674461b46a422c264cfd7522dfb081a0e40191120537bdf6ee101"}
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.118691 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.170698 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.223529 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgtrm\" (UniqueName: \"kubernetes.io/projected/4b66d472-e836-4788-a4f5-2cfc3b269e24-kube-api-access-qgtrm\") pod \"4b66d472-e836-4788-a4f5-2cfc3b269e24\" (UID: \"4b66d472-e836-4788-a4f5-2cfc3b269e24\") "
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.223653 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqs6z\" (UniqueName: \"kubernetes.io/projected/66eadbd3-94ed-46b4-abb4-7a9ead434641-kube-api-access-jqs6z\") pod \"66eadbd3-94ed-46b4-abb4-7a9ead434641\" (UID: \"66eadbd3-94ed-46b4-abb4-7a9ead434641\") "
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.223759 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66eadbd3-94ed-46b4-abb4-7a9ead434641-operator-scripts\") pod \"66eadbd3-94ed-46b4-abb4-7a9ead434641\" (UID: \"66eadbd3-94ed-46b4-abb4-7a9ead434641\") "
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.224224 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b66d472-e836-4788-a4f5-2cfc3b269e24-operator-scripts\") pod \"4b66d472-e836-4788-a4f5-2cfc3b269e24\" (UID: \"4b66d472-e836-4788-a4f5-2cfc3b269e24\") "
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.224829 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b66d472-e836-4788-a4f5-2cfc3b269e24-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4b66d472-e836-4788-a4f5-2cfc3b269e24" (UID: "4b66d472-e836-4788-a4f5-2cfc3b269e24"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.224839 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66eadbd3-94ed-46b4-abb4-7a9ead434641-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66eadbd3-94ed-46b4-abb4-7a9ead434641" (UID: "66eadbd3-94ed-46b4-abb4-7a9ead434641"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.233108 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b66d472-e836-4788-a4f5-2cfc3b269e24-kube-api-access-qgtrm" (OuterVolumeSpecName: "kube-api-access-qgtrm") pod "4b66d472-e836-4788-a4f5-2cfc3b269e24" (UID: "4b66d472-e836-4788-a4f5-2cfc3b269e24"). InnerVolumeSpecName "kube-api-access-qgtrm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.236052 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66eadbd3-94ed-46b4-abb4-7a9ead434641-kube-api-access-jqs6z" (OuterVolumeSpecName: "kube-api-access-jqs6z") pod "66eadbd3-94ed-46b4-abb4-7a9ead434641" (UID: "66eadbd3-94ed-46b4-abb4-7a9ead434641"). InnerVolumeSpecName "kube-api-access-jqs6z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.247365 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.325584 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/107c5bb6-4c54-4e09-9dc5-2f777321d66c-operator-scripts\") pod \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\" (UID: \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\") "
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.325635 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvbdx\" (UniqueName: \"kubernetes.io/projected/107c5bb6-4c54-4e09-9dc5-2f777321d66c-kube-api-access-bvbdx\") pod \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\" (UID: \"107c5bb6-4c54-4e09-9dc5-2f777321d66c\") "
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.326068 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66eadbd3-94ed-46b4-abb4-7a9ead434641-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.326090 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b66d472-e836-4788-a4f5-2cfc3b269e24-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.326105 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgtrm\" (UniqueName: \"kubernetes.io/projected/4b66d472-e836-4788-a4f5-2cfc3b269e24-kube-api-access-qgtrm\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.326115 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqs6z\" (UniqueName: \"kubernetes.io/projected/66eadbd3-94ed-46b4-abb4-7a9ead434641-kube-api-access-jqs6z\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.327017 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107c5bb6-4c54-4e09-9dc5-2f777321d66c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "107c5bb6-4c54-4e09-9dc5-2f777321d66c" (UID: "107c5bb6-4c54-4e09-9dc5-2f777321d66c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.329299 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107c5bb6-4c54-4e09-9dc5-2f777321d66c-kube-api-access-bvbdx" (OuterVolumeSpecName: "kube-api-access-bvbdx") pod "107c5bb6-4c54-4e09-9dc5-2f777321d66c" (UID: "107c5bb6-4c54-4e09-9dc5-2f777321d66c"). InnerVolumeSpecName "kube-api-access-bvbdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.427551 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/107c5bb6-4c54-4e09-9dc5-2f777321d66c-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.427586 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvbdx\" (UniqueName: \"kubernetes.io/projected/107c5bb6-4c54-4e09-9dc5-2f777321d66c-kube-api-access-bvbdx\") on node \"crc\" DevicePath \"\""
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.809440 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rqxct" event={"ID":"107c5bb6-4c54-4e09-9dc5-2f777321d66c","Type":"ContainerDied","Data":"f73229be135cc54841485df83ecbb76db94c0084bbbdb87c890cef5ff0265bd5"}
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.809477 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f73229be135cc54841485df83ecbb76db94c0084bbbdb87c890cef5ff0265bd5"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.809452 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rqxct"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.810862 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-z6v79" event={"ID":"4b66d472-e836-4788-a4f5-2cfc3b269e24","Type":"ContainerDied","Data":"a043e25e33b23d649e7d9aec79bb84f801a5e3567dc4c5cdac93454bd13a857c"}
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.810883 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a043e25e33b23d649e7d9aec79bb84f801a5e3567dc4c5cdac93454bd13a857c"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.810933 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-z6v79"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.815345 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"50c82a73b7088202bc41a534913b1bc85f6e5abcc273977b4d16c1c8a6b96727"}
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.815498 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"3b767a202cd0ac42f4ad7afbf60bede2ed6acd9e9b94d6d6848d17b744a3b4d6"}
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.815527 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"421d5855-985f-4d7f-9faf-c868088a7291","Type":"ContainerStarted","Data":"68728c1add31feb0bdd08a38274ccf939d512779a55fd9eda3ba3f7aa23ef3ce"}
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.816837 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9f-account-create-update-jcgcd" event={"ID":"66eadbd3-94ed-46b4-abb4-7a9ead434641","Type":"ContainerDied","Data":"a691ff5bec1ea5626f25e5039b6d13164e2335ad38eb599e61168b25e96b53ce"}
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.816865 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a691ff5bec1ea5626f25e5039b6d13164e2335ad38eb599e61168b25e96b53ce"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.816915 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8d9f-account-create-update-jcgcd"
Dec 05 12:28:53 crc kubenswrapper[4711]: I1205 12:28:53.854479 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=37.212559343 podStartE2EDuration="42.854458165s" podCreationTimestamp="2025-12-05 12:28:11 +0000 UTC" firstStartedPulling="2025-12-05 12:28:45.982259875 +0000 UTC m=+1171.566582205" lastFinishedPulling="2025-12-05 12:28:51.624158697 +0000 UTC m=+1177.208481027" observedRunningTime="2025-12-05 12:28:53.847852263 +0000 UTC m=+1179.432174593" watchObservedRunningTime="2025-12-05 12:28:53.854458165 +0000 UTC m=+1179.438780495"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131039 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-859dfd4ddf-jlv5q"]
Dec 05 12:28:54 crc kubenswrapper[4711]: E1205 12:28:54.131357 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e40033e-bd5c-45ee-b9d9-5c02304fd76d" containerName="mariadb-account-create-update"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131372 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e40033e-bd5c-45ee-b9d9-5c02304fd76d" containerName="mariadb-account-create-update"
Dec 05 12:28:54 crc kubenswrapper[4711]: E1205 12:28:54.131498 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107c5bb6-4c54-4e09-9dc5-2f777321d66c" containerName="mariadb-database-create"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131508 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="107c5bb6-4c54-4e09-9dc5-2f777321d66c" containerName="mariadb-database-create"
Dec 05 12:28:54 crc kubenswrapper[4711]: E1205 12:28:54.131533 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b66d472-e836-4788-a4f5-2cfc3b269e24" containerName="mariadb-database-create"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131542 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b66d472-e836-4788-a4f5-2cfc3b269e24" containerName="mariadb-database-create"
Dec 05 12:28:54 crc kubenswrapper[4711]: E1205 12:28:54.131556 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66eadbd3-94ed-46b4-abb4-7a9ead434641" containerName="mariadb-account-create-update"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131562 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="66eadbd3-94ed-46b4-abb4-7a9ead434641" containerName="mariadb-account-create-update"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131713 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e40033e-bd5c-45ee-b9d9-5c02304fd76d" containerName="mariadb-account-create-update"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131724 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="66eadbd3-94ed-46b4-abb4-7a9ead434641" containerName="mariadb-account-create-update"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131738 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="107c5bb6-4c54-4e09-9dc5-2f777321d66c" containerName="mariadb-database-create"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.131750 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b66d472-e836-4788-a4f5-2cfc3b269e24" containerName="mariadb-database-create"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.132614 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.137179 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.155288 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-859dfd4ddf-jlv5q"]
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.241698 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-sb\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.241759 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-nb\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.241787 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-config\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.241814 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-svc\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.241945 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpxqt\" (UniqueName: \"kubernetes.io/projected/cc0d288d-3af3-4af4-9461-a70f2e14d5da-kube-api-access-dpxqt\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.242028 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-swift-storage-0\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.343885 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpxqt\" (UniqueName: \"kubernetes.io/projected/cc0d288d-3af3-4af4-9461-a70f2e14d5da-kube-api-access-dpxqt\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.343977 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-swift-storage-0\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.344036 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-sb\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.344065 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-nb\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.344087 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-config\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.344104 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-svc\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.345225 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-config\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.345252 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-swift-storage-0\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.345239 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-sb\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.345344 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-nb\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.345781 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-svc\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.366606 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpxqt\" (UniqueName: \"kubernetes.io/projected/cc0d288d-3af3-4af4-9461-a70f2e14d5da-kube-api-access-dpxqt\") pod \"dnsmasq-dns-859dfd4ddf-jlv5q\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:54 crc kubenswrapper[4711]: I1205 12:28:54.451051 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q"
Dec 05 12:28:55 crc kubenswrapper[4711]: I1205 12:28:55.026064 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-859dfd4ddf-jlv5q"]
Dec 05 12:28:55 crc kubenswrapper[4711]: I1205 12:28:55.289836 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="bff43887-0cb0-4da0-a16f-6264877c473e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.105:5671: connect: connection refused"
Dec 05 12:28:55 crc kubenswrapper[4711]: I1205 12:28:55.744426 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 05 12:28:55 crc kubenswrapper[4711]: I1205 12:28:55.838599 4711 generic.go:334] "Generic (PLEG): container finished" podID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerID="56b6c394afe68f3127778fd5764c7f667593cce7b0046b4f0a25d9874d8f6497" exitCode=0
Dec 05 12:28:55 crc kubenswrapper[4711]: I1205 12:28:55.838656 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" event={"ID":"cc0d288d-3af3-4af4-9461-a70f2e14d5da","Type":"ContainerDied","Data":"56b6c394afe68f3127778fd5764c7f667593cce7b0046b4f0a25d9874d8f6497"}
Dec 05 12:28:55 crc kubenswrapper[4711]: I1205 12:28:55.838688 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" event={"ID":"cc0d288d-3af3-4af4-9461-a70f2e14d5da","Type":"ContainerStarted","Data":"7f96e36a5a462871a97f87d52912048afc808bc71e48997644c4a5c478a3304b"}
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.075433 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.167985 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-notifications-server-0" podUID="d1b49e15-30ab-4ef7-8980-436468104f7b" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.212954 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-795fh"]
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.214410 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-795fh"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.233657 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-93db-account-create-update-ft8x5"]
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.234873 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-93db-account-create-update-ft8x5"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.237184 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.245904 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-795fh"]
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.260599 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-93db-account-create-update-ft8x5"]
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.302543 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxxgl\" (UniqueName: \"kubernetes.io/projected/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-kube-api-access-wxxgl\") pod \"cinder-93db-account-create-update-ft8x5\" (UID: \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\") " pod="openstack/cinder-93db-account-create-update-ft8x5"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.302634 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcf9f\" (UniqueName: \"kubernetes.io/projected/2d52e95e-2ce4-4ea1-a9c3-50360150e675-kube-api-access-kcf9f\") pod \"cinder-db-create-795fh\" (UID: \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\") " pod="openstack/cinder-db-create-795fh"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.302695 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d52e95e-2ce4-4ea1-a9c3-50360150e675-operator-scripts\") pod \"cinder-db-create-795fh\" (UID: \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\") " pod="openstack/cinder-db-create-795fh"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.302773 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-operator-scripts\") pod \"cinder-93db-account-create-update-ft8x5\" (UID: \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\") " pod="openstack/cinder-93db-account-create-update-ft8x5"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.319863 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-bcc9-account-create-update-qjdq2"]
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.321283 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-bcc9-account-create-update-qjdq2"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.326168 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.337090 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-4htss"]
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.338527 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4htss"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.349587 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4htss"]
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.361777 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-bcc9-account-create-update-qjdq2"]
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.407012 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-operator-scripts\") pod \"cinder-93db-account-create-update-ft8x5\" (UID: \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\") " pod="openstack/cinder-93db-account-create-update-ft8x5"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.407582 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxxgl\" (UniqueName: \"kubernetes.io/projected/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-kube-api-access-wxxgl\") pod \"cinder-93db-account-create-update-ft8x5\" (UID: \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\") " pod="openstack/cinder-93db-account-create-update-ft8x5"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.407794 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcf9f\" (UniqueName: \"kubernetes.io/projected/2d52e95e-2ce4-4ea1-a9c3-50360150e675-kube-api-access-kcf9f\") pod \"cinder-db-create-795fh\" (UID: \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\") " pod="openstack/cinder-db-create-795fh"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.407952 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bfdb3c4-d406-4d00-b405-2216fbe95943-operator-scripts\") pod \"barbican-db-create-4htss\" (UID: \"8bfdb3c4-d406-4d00-b405-2216fbe95943\") " pod="openstack/barbican-db-create-4htss"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.408123 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500de779-f104-4141-87e8-95e80e23a870-operator-scripts\") pod \"barbican-bcc9-account-create-update-qjdq2\" (UID: \"500de779-f104-4141-87e8-95e80e23a870\") " pod="openstack/barbican-bcc9-account-create-update-qjdq2"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.408246 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d52e95e-2ce4-4ea1-a9c3-50360150e675-operator-scripts\") pod \"cinder-db-create-795fh\" (UID: \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\") " pod="openstack/cinder-db-create-795fh"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.408420 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cslg9\" (UniqueName: \"kubernetes.io/projected/8bfdb3c4-d406-4d00-b405-2216fbe95943-kube-api-access-cslg9\") pod \"barbican-db-create-4htss\" (UID: \"8bfdb3c4-d406-4d00-b405-2216fbe95943\") " pod="openstack/barbican-db-create-4htss"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.408546 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xshrt\" (UniqueName: \"kubernetes.io/projected/500de779-f104-4141-87e8-95e80e23a870-kube-api-access-xshrt\") pod \"barbican-bcc9-account-create-update-qjdq2\" (UID: \"500de779-f104-4141-87e8-95e80e23a870\") " pod="openstack/barbican-bcc9-account-create-update-qjdq2"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.409963 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-operator-scripts\") pod \"cinder-93db-account-create-update-ft8x5\" (UID: \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\") " pod="openstack/cinder-93db-account-create-update-ft8x5"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.411641 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d52e95e-2ce4-4ea1-a9c3-50360150e675-operator-scripts\") pod \"cinder-db-create-795fh\" (UID: \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\") " pod="openstack/cinder-db-create-795fh"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.456702 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcf9f\" (UniqueName: \"kubernetes.io/projected/2d52e95e-2ce4-4ea1-a9c3-50360150e675-kube-api-access-kcf9f\") pod \"cinder-db-create-795fh\" (UID: \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\") " pod="openstack/cinder-db-create-795fh"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.461182 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxxgl\" (UniqueName: \"kubernetes.io/projected/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-kube-api-access-wxxgl\") pod \"cinder-93db-account-create-update-ft8x5\" (UID: \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\") " pod="openstack/cinder-93db-account-create-update-ft8x5"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.510137 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500de779-f104-4141-87e8-95e80e23a870-operator-scripts\") pod \"barbican-bcc9-account-create-update-qjdq2\" (UID: \"500de779-f104-4141-87e8-95e80e23a870\") " pod="openstack/barbican-bcc9-account-create-update-qjdq2"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.510999 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cslg9\" (UniqueName: \"kubernetes.io/projected/8bfdb3c4-d406-4d00-b405-2216fbe95943-kube-api-access-cslg9\") pod \"barbican-db-create-4htss\" (UID: \"8bfdb3c4-d406-4d00-b405-2216fbe95943\") " pod="openstack/barbican-db-create-4htss"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.511137 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xshrt\" (UniqueName: \"kubernetes.io/projected/500de779-f104-4141-87e8-95e80e23a870-kube-api-access-xshrt\") pod \"barbican-bcc9-account-create-update-qjdq2\" (UID: \"500de779-f104-4141-87e8-95e80e23a870\") " pod="openstack/barbican-bcc9-account-create-update-qjdq2"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.511094 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500de779-f104-4141-87e8-95e80e23a870-operator-scripts\") pod \"barbican-bcc9-account-create-update-qjdq2\" (UID: \"500de779-f104-4141-87e8-95e80e23a870\") " pod="openstack/barbican-bcc9-account-create-update-qjdq2"
Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.511496 4711
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bfdb3c4-d406-4d00-b405-2216fbe95943-operator-scripts\") pod \"barbican-db-create-4htss\" (UID: \"8bfdb3c4-d406-4d00-b405-2216fbe95943\") " pod="openstack/barbican-db-create-4htss" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.512083 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bfdb3c4-d406-4d00-b405-2216fbe95943-operator-scripts\") pod \"barbican-db-create-4htss\" (UID: \"8bfdb3c4-d406-4d00-b405-2216fbe95943\") " pod="openstack/barbican-db-create-4htss" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.532219 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-np4sn"] Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.534329 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-795fh" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.535914 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.538019 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.538575 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-7sbtb" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.538731 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.538884 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.544116 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cslg9\" (UniqueName: \"kubernetes.io/projected/8bfdb3c4-d406-4d00-b405-2216fbe95943-kube-api-access-cslg9\") pod \"barbican-db-create-4htss\" (UID: \"8bfdb3c4-d406-4d00-b405-2216fbe95943\") " pod="openstack/barbican-db-create-4htss" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.557616 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-93db-account-create-update-ft8x5" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.558451 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xshrt\" (UniqueName: \"kubernetes.io/projected/500de779-f104-4141-87e8-95e80e23a870-kube-api-access-xshrt\") pod \"barbican-bcc9-account-create-update-qjdq2\" (UID: \"500de779-f104-4141-87e8-95e80e23a870\") " pod="openstack/barbican-bcc9-account-create-update-qjdq2" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.560726 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-np4sn"] Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.614281 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-config-data\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.614645 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxwvf\" (UniqueName: \"kubernetes.io/projected/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-kube-api-access-vxwvf\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.614801 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-combined-ca-bundle\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.639093 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-bcc9-account-create-update-qjdq2" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.659043 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-4htss" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.716969 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-config-data\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.717485 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxwvf\" (UniqueName: \"kubernetes.io/projected/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-kube-api-access-vxwvf\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.717541 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-combined-ca-bundle\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.726145 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-config-data\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.735109 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-combined-ca-bundle\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.741213 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxwvf\" (UniqueName: \"kubernetes.io/projected/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-kube-api-access-vxwvf\") pod \"keystone-db-sync-np4sn\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.763152 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-np4sn" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.865879 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" event={"ID":"cc0d288d-3af3-4af4-9461-a70f2e14d5da","Type":"ContainerStarted","Data":"62ba5a1faa76c1fe3b40d4beaa6c236d0f5717efe388d3f1ae5ccef8b89cc838"} Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.866101 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" Dec 05 12:28:56 crc kubenswrapper[4711]: I1205 12:28:56.890964 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" podStartSLOduration=2.890949831 podStartE2EDuration="2.890949831s" podCreationTimestamp="2025-12-05 12:28:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:56.890175433 +0000 UTC m=+1182.474497763" watchObservedRunningTime="2025-12-05 12:28:56.890949831 +0000 UTC m=+1182.475272161" Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.155536 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-795fh"] Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.274327 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-np4sn"] Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.284149 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-93db-account-create-update-ft8x5"] Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.356158 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4htss"] Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.373578 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-bcc9-account-create-update-qjdq2"] Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.876934 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4htss" event={"ID":"8bfdb3c4-d406-4d00-b405-2216fbe95943","Type":"ContainerStarted","Data":"f238c69518f7100223bf01fb4068d87baa6238ccb75af139fe957500d3905ee8"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.877248 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4htss" event={"ID":"8bfdb3c4-d406-4d00-b405-2216fbe95943","Type":"ContainerStarted","Data":"8a4f3fb4239c3a7b68c6e263d3f4dbb3dffde4a740e09246537538c9caf023e2"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.881749 4711 generic.go:334] "Generic (PLEG): container finished" podID="7e4b2ca5-1164-478f-b5cd-b3a553a5100e" containerID="b784f6ef465395013f1cf88fd1b269c745842264fdbf7c4ad0f83be0aa958acd" exitCode=0 Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.881876 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-93db-account-create-update-ft8x5" event={"ID":"7e4b2ca5-1164-478f-b5cd-b3a553a5100e","Type":"ContainerDied","Data":"b784f6ef465395013f1cf88fd1b269c745842264fdbf7c4ad0f83be0aa958acd"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.881935 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-93db-account-create-update-ft8x5" event={"ID":"7e4b2ca5-1164-478f-b5cd-b3a553a5100e","Type":"ContainerStarted","Data":"a9d288378ac4a80d2b950c10eda6b86eea2ddbc2438af055653805e97e3ada43"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.883724 4711 
generic.go:334] "Generic (PLEG): container finished" podID="2d52e95e-2ce4-4ea1-a9c3-50360150e675" containerID="5314bd7fc52affb14a5640a4fb32585b823e3290afe7e83b5c2071facfc9f823" exitCode=0 Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.883781 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-795fh" event={"ID":"2d52e95e-2ce4-4ea1-a9c3-50360150e675","Type":"ContainerDied","Data":"5314bd7fc52affb14a5640a4fb32585b823e3290afe7e83b5c2071facfc9f823"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.883803 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-795fh" event={"ID":"2d52e95e-2ce4-4ea1-a9c3-50360150e675","Type":"ContainerStarted","Data":"5831d38d4e13ccff741182a802ce4f1a23760e35e8d14048939ccbf7d2544999"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.886748 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-bcc9-account-create-update-qjdq2" event={"ID":"500de779-f104-4141-87e8-95e80e23a870","Type":"ContainerStarted","Data":"c8c9107256b64f80317aa1a558204b85dc735cfc8eabcf4558e2305e84582aa3"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.886792 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-bcc9-account-create-update-qjdq2" event={"ID":"500de779-f104-4141-87e8-95e80e23a870","Type":"ContainerStarted","Data":"e63a383031ae5198e3958995ea26d251afbc09ef5df6adbe7a8dedbd18b815ac"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.891596 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-np4sn" event={"ID":"2aadbf74-6733-453e-bdfc-f8b8bb7f60da","Type":"ContainerStarted","Data":"168e0026c4a2ebd6aa3517f04fbc897b3e6ca6cf766f3b345d68742924d24880"} Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.897823 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-4htss" podStartSLOduration=1.89780169 podStartE2EDuration="1.89780169s" podCreationTimestamp="2025-12-05 12:28:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:57.894175512 +0000 UTC m=+1183.478497842" watchObservedRunningTime="2025-12-05 12:28:57.89780169 +0000 UTC m=+1183.482124020" Dec 05 12:28:57 crc kubenswrapper[4711]: I1205 12:28:57.916926 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-bcc9-account-create-update-qjdq2" podStartSLOduration=1.916911099 podStartE2EDuration="1.916911099s" podCreationTimestamp="2025-12-05 12:28:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:57.915141746 +0000 UTC m=+1183.499464166" watchObservedRunningTime="2025-12-05 12:28:57.916911099 +0000 UTC m=+1183.501233429" Dec 05 12:28:58 crc kubenswrapper[4711]: I1205 12:28:58.114306 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 12:28:58 crc kubenswrapper[4711]: I1205 12:28:58.121675 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 12:28:58 crc kubenswrapper[4711]: I1205 12:28:58.904549 4711 generic.go:334] "Generic (PLEG): container finished" podID="8bfdb3c4-d406-4d00-b405-2216fbe95943" containerID="f238c69518f7100223bf01fb4068d87baa6238ccb75af139fe957500d3905ee8" exitCode=0 Dec 05 12:28:58 crc 
kubenswrapper[4711]: I1205 12:28:58.904594 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4htss" event={"ID":"8bfdb3c4-d406-4d00-b405-2216fbe95943","Type":"ContainerDied","Data":"f238c69518f7100223bf01fb4068d87baa6238ccb75af139fe957500d3905ee8"} Dec 05 12:28:58 crc kubenswrapper[4711]: I1205 12:28:58.906806 4711 generic.go:334] "Generic (PLEG): container finished" podID="500de779-f104-4141-87e8-95e80e23a870" containerID="c8c9107256b64f80317aa1a558204b85dc735cfc8eabcf4558e2305e84582aa3" exitCode=0 Dec 05 12:28:58 crc kubenswrapper[4711]: I1205 12:28:58.906920 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-bcc9-account-create-update-qjdq2" event={"ID":"500de779-f104-4141-87e8-95e80e23a870","Type":"ContainerDied","Data":"c8c9107256b64f80317aa1a558204b85dc735cfc8eabcf4558e2305e84582aa3"} Dec 05 12:28:58 crc kubenswrapper[4711]: I1205 12:28:58.909462 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:02 crc kubenswrapper[4711]: I1205 12:29:02.636145 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:29:02 crc kubenswrapper[4711]: I1205 12:29:02.638523 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="prometheus" containerID="cri-o://fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996" gracePeriod=600 Dec 05 12:29:02 crc kubenswrapper[4711]: I1205 12:29:02.639028 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="thanos-sidecar" containerID="cri-o://07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f" gracePeriod=600 Dec 05 12:29:02 crc kubenswrapper[4711]: I1205 12:29:02.639192 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="config-reloader" containerID="cri-o://e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3" gracePeriod=600 Dec 05 12:29:03 crc kubenswrapper[4711]: I1205 12:29:03.132173 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="prometheus" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.404193 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4htss" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.411332 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-93db-account-create-update-ft8x5" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.429933 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-795fh" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.436090 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-bcc9-account-create-update-qjdq2" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.455190 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.535372 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55ffbfff47-b5jn7"] Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.535652 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" podUID="9db9755e-38c2-44d8-a728-424a4ff924a0" containerName="dnsmasq-dns" containerID="cri-o://9f2edb50c520d49dffb48325213f290e472a0a63fad383e0c9a7ec20ddc5efba" gracePeriod=10 Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.577219 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxxgl\" (UniqueName: \"kubernetes.io/projected/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-kube-api-access-wxxgl\") pod \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\" (UID: \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\") " Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.577330 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500de779-f104-4141-87e8-95e80e23a870-operator-scripts\") pod \"500de779-f104-4141-87e8-95e80e23a870\" (UID: \"500de779-f104-4141-87e8-95e80e23a870\") " Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.577421 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-operator-scripts\") pod \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\" (UID: \"7e4b2ca5-1164-478f-b5cd-b3a553a5100e\") " Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.577489 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xshrt\" (UniqueName: \"kubernetes.io/projected/500de779-f104-4141-87e8-95e80e23a870-kube-api-access-xshrt\") pod \"500de779-f104-4141-87e8-95e80e23a870\" (UID: \"500de779-f104-4141-87e8-95e80e23a870\") " Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.577513 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bfdb3c4-d406-4d00-b405-2216fbe95943-operator-scripts\") pod \"8bfdb3c4-d406-4d00-b405-2216fbe95943\" (UID: \"8bfdb3c4-d406-4d00-b405-2216fbe95943\") " Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.577534 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cslg9\" (UniqueName: \"kubernetes.io/projected/8bfdb3c4-d406-4d00-b405-2216fbe95943-kube-api-access-cslg9\") pod \"8bfdb3c4-d406-4d00-b405-2216fbe95943\" (UID: \"8bfdb3c4-d406-4d00-b405-2216fbe95943\") " Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.577609 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcf9f\" (UniqueName: \"kubernetes.io/projected/2d52e95e-2ce4-4ea1-a9c3-50360150e675-kube-api-access-kcf9f\") pod \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\" (UID: \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\") " Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.577693 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/2d52e95e-2ce4-4ea1-a9c3-50360150e675-operator-scripts\") pod \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\" (UID: \"2d52e95e-2ce4-4ea1-a9c3-50360150e675\") " Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.582373 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/500de779-f104-4141-87e8-95e80e23a870-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "500de779-f104-4141-87e8-95e80e23a870" (UID: "500de779-f104-4141-87e8-95e80e23a870"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.584441 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bfdb3c4-d406-4d00-b405-2216fbe95943-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8bfdb3c4-d406-4d00-b405-2216fbe95943" (UID: "8bfdb3c4-d406-4d00-b405-2216fbe95943"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.584528 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e4b2ca5-1164-478f-b5cd-b3a553a5100e" (UID: "7e4b2ca5-1164-478f-b5cd-b3a553a5100e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.584886 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d52e95e-2ce4-4ea1-a9c3-50360150e675-kube-api-access-kcf9f" (OuterVolumeSpecName: "kube-api-access-kcf9f") pod "2d52e95e-2ce4-4ea1-a9c3-50360150e675" (UID: "2d52e95e-2ce4-4ea1-a9c3-50360150e675"). InnerVolumeSpecName "kube-api-access-kcf9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.584939 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d52e95e-2ce4-4ea1-a9c3-50360150e675-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d52e95e-2ce4-4ea1-a9c3-50360150e675" (UID: "2d52e95e-2ce4-4ea1-a9c3-50360150e675"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.586764 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-kube-api-access-wxxgl" (OuterVolumeSpecName: "kube-api-access-wxxgl") pod "7e4b2ca5-1164-478f-b5cd-b3a553a5100e" (UID: "7e4b2ca5-1164-478f-b5cd-b3a553a5100e"). InnerVolumeSpecName "kube-api-access-wxxgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.603446 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/500de779-f104-4141-87e8-95e80e23a870-kube-api-access-xshrt" (OuterVolumeSpecName: "kube-api-access-xshrt") pod "500de779-f104-4141-87e8-95e80e23a870" (UID: "500de779-f104-4141-87e8-95e80e23a870"). InnerVolumeSpecName "kube-api-access-xshrt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.603857 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bfdb3c4-d406-4d00-b405-2216fbe95943-kube-api-access-cslg9" (OuterVolumeSpecName: "kube-api-access-cslg9") pod "8bfdb3c4-d406-4d00-b405-2216fbe95943" (UID: "8bfdb3c4-d406-4d00-b405-2216fbe95943"). InnerVolumeSpecName "kube-api-access-cslg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.679747 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d52e95e-2ce4-4ea1-a9c3-50360150e675-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.679793 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxxgl\" (UniqueName: \"kubernetes.io/projected/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-kube-api-access-wxxgl\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.679811 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500de779-f104-4141-87e8-95e80e23a870-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.679822 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2ca5-1164-478f-b5cd-b3a553a5100e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.679834 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xshrt\" (UniqueName: \"kubernetes.io/projected/500de779-f104-4141-87e8-95e80e23a870-kube-api-access-xshrt\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.679845 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bfdb3c4-d406-4d00-b405-2216fbe95943-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.679856 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cslg9\" (UniqueName: \"kubernetes.io/projected/8bfdb3c4-d406-4d00-b405-2216fbe95943-kube-api-access-cslg9\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.679866 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcf9f\" (UniqueName: \"kubernetes.io/projected/2d52e95e-2ce4-4ea1-a9c3-50360150e675-kube-api-access-kcf9f\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.896151 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.974993 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-bcc9-account-create-update-qjdq2" event={"ID":"500de779-f104-4141-87e8-95e80e23a870","Type":"ContainerDied","Data":"e63a383031ae5198e3958995ea26d251afbc09ef5df6adbe7a8dedbd18b815ac"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.975035 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e63a383031ae5198e3958995ea26d251afbc09ef5df6adbe7a8dedbd18b815ac" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.975117 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-bcc9-account-create-update-qjdq2" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.978423 4711 generic.go:334] "Generic (PLEG): container finished" podID="9db9755e-38c2-44d8-a728-424a4ff924a0" containerID="9f2edb50c520d49dffb48325213f290e472a0a63fad383e0c9a7ec20ddc5efba" exitCode=0 Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.978510 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" event={"ID":"9db9755e-38c2-44d8-a728-424a4ff924a0","Type":"ContainerDied","Data":"9f2edb50c520d49dffb48325213f290e472a0a63fad383e0c9a7ec20ddc5efba"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.981754 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4htss" event={"ID":"8bfdb3c4-d406-4d00-b405-2216fbe95943","Type":"ContainerDied","Data":"8a4f3fb4239c3a7b68c6e263d3f4dbb3dffde4a740e09246537538c9caf023e2"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.981793 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a4f3fb4239c3a7b68c6e263d3f4dbb3dffde4a740e09246537538c9caf023e2" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.981841 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4htss" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.987272 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-93db-account-create-update-ft8x5" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.987318 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-93db-account-create-update-ft8x5" event={"ID":"7e4b2ca5-1164-478f-b5cd-b3a553a5100e","Type":"ContainerDied","Data":"a9d288378ac4a80d2b950c10eda6b86eea2ddbc2438af055653805e97e3ada43"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.987368 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9d288378ac4a80d2b950c10eda6b86eea2ddbc2438af055653805e97e3ada43" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.988963 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-795fh" event={"ID":"2d52e95e-2ce4-4ea1-a9c3-50360150e675","Type":"ContainerDied","Data":"5831d38d4e13ccff741182a802ce4f1a23760e35e8d14048939ccbf7d2544999"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.988984 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5831d38d4e13ccff741182a802ce4f1a23760e35e8d14048939ccbf7d2544999" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.989030 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-795fh" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996407 4711 generic.go:334] "Generic (PLEG): container finished" podID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerID="07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f" exitCode=0 Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996441 4711 generic.go:334] "Generic (PLEG): container finished" podID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerID="e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3" exitCode=0 Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996449 4711 generic.go:334] "Generic (PLEG): container finished" podID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerID="fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996" exitCode=0 Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996469 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerDied","Data":"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996492 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerDied","Data":"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996503 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerDied","Data":"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996512 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a5dc5cdb-751f-4754-af33-2985c29b98a3","Type":"ContainerDied","Data":"fb35dc6be59db173e175f0d1a931def1e676e6e3f3d1d837ee28ba7a83b60b10"} Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996531 4711 scope.go:117] "RemoveContainer" containerID="07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f" Dec 05 12:29:04 crc kubenswrapper[4711]: I1205 12:29:04.996665 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.030572 4711 scope.go:117] "RemoveContainer" containerID="e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.048907 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.063027 4711 scope.go:117] "RemoveContainer" containerID="fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088380 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"a5dc5cdb-751f-4754-af33-2985c29b98a3\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088441 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-web-config\") pod \"a5dc5cdb-751f-4754-af33-2985c29b98a3\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088471 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a5dc5cdb-751f-4754-af33-2985c29b98a3-config-out\") pod \"a5dc5cdb-751f-4754-af33-2985c29b98a3\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088503 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-thanos-prometheus-http-client-file\") pod \"a5dc5cdb-751f-4754-af33-2985c29b98a3\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088565 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-tls-assets\") pod \"a5dc5cdb-751f-4754-af33-2985c29b98a3\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088591 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a5dc5cdb-751f-4754-af33-2985c29b98a3-prometheus-metric-storage-rulefiles-0\") pod \"a5dc5cdb-751f-4754-af33-2985c29b98a3\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088634 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-config\") pod \"a5dc5cdb-751f-4754-af33-2985c29b98a3\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088767 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfghw\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-kube-api-access-tfghw\") pod \"a5dc5cdb-751f-4754-af33-2985c29b98a3\" (UID: \"a5dc5cdb-751f-4754-af33-2985c29b98a3\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.088811 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-sb\") pod \"9db9755e-38c2-44d8-a728-424a4ff924a0\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " Dec 05 12:29:05 crc 
kubenswrapper[4711]: I1205 12:29:05.088830 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-nb\") pod \"9db9755e-38c2-44d8-a728-424a4ff924a0\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.108624 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5dc5cdb-751f-4754-af33-2985c29b98a3-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "a5dc5cdb-751f-4754-af33-2985c29b98a3" (UID: "a5dc5cdb-751f-4754-af33-2985c29b98a3"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.111847 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-config" (OuterVolumeSpecName: "config") pod "a5dc5cdb-751f-4754-af33-2985c29b98a3" (UID: "a5dc5cdb-751f-4754-af33-2985c29b98a3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.112136 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-kube-api-access-tfghw" (OuterVolumeSpecName: "kube-api-access-tfghw") pod "a5dc5cdb-751f-4754-af33-2985c29b98a3" (UID: "a5dc5cdb-751f-4754-af33-2985c29b98a3"). InnerVolumeSpecName "kube-api-access-tfghw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.116890 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "a5dc5cdb-751f-4754-af33-2985c29b98a3" (UID: "a5dc5cdb-751f-4754-af33-2985c29b98a3"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.133641 4711 scope.go:117] "RemoveContainer" containerID="3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.133699 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "a5dc5cdb-751f-4754-af33-2985c29b98a3" (UID: "a5dc5cdb-751f-4754-af33-2985c29b98a3"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.134175 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5dc5cdb-751f-4754-af33-2985c29b98a3-config-out" (OuterVolumeSpecName: "config-out") pod "a5dc5cdb-751f-4754-af33-2985c29b98a3" (UID: "a5dc5cdb-751f-4754-af33-2985c29b98a3"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.155415 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "a5dc5cdb-751f-4754-af33-2985c29b98a3" (UID: "a5dc5cdb-751f-4754-af33-2985c29b98a3"). InnerVolumeSpecName "pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.157004 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-web-config" (OuterVolumeSpecName: "web-config") pod "a5dc5cdb-751f-4754-af33-2985c29b98a3" (UID: "a5dc5cdb-751f-4754-af33-2985c29b98a3"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.157980 4711 scope.go:117] "RemoveContainer" containerID="07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f" Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.158435 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f\": container with ID starting with 07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f not found: ID does not exist" containerID="07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.158476 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f"} err="failed to get container status \"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f\": rpc error: code = NotFound desc = could not find container \"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f\": container with ID starting with 07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.158504 4711 scope.go:117] "RemoveContainer" containerID="e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3" Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.158873 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3\": container with ID starting with e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3 not found: ID does not exist" containerID="e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.158921 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3"} err="failed to get container status \"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3\": rpc error: code = NotFound desc = could not find container \"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3\": container with ID starting with e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3 not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.158951 4711 scope.go:117] "RemoveContainer" 
containerID="fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996" Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.159266 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996\": container with ID starting with fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996 not found: ID does not exist" containerID="fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.159295 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996"} err="failed to get container status \"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996\": rpc error: code = NotFound desc = could not find container \"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996\": container with ID starting with fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996 not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.159315 4711 scope.go:117] "RemoveContainer" containerID="3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194" Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.159673 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194\": container with ID starting with 3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194 not found: ID does not exist" containerID="3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.159802 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194"} err="failed to get container status \"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194\": rpc error: code = NotFound desc = could not find container \"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194\": container with ID starting with 3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194 not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.160258 4711 scope.go:117] "RemoveContainer" containerID="07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.160619 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f"} err="failed to get container status \"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f\": rpc error: code = NotFound desc = could not find container \"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f\": container with ID starting with 07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.160647 4711 scope.go:117] "RemoveContainer" containerID="e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.160880 4711 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3"} err="failed to get container status \"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3\": rpc error: code = NotFound desc = could not find container \"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3\": container with ID starting with e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3 not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.160902 4711 scope.go:117] "RemoveContainer" containerID="fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.161185 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996"} err="failed to get container status \"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996\": rpc error: code = NotFound desc = could not find container \"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996\": container with ID starting with fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996 not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.161265 4711 scope.go:117] "RemoveContainer" containerID="3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.161920 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194"} err="failed to get container status \"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194\": rpc error: code = NotFound desc = could not find container \"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194\": container with ID starting with 3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194 not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.161953 4711 scope.go:117] "RemoveContainer" containerID="07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.162232 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f"} err="failed to get container status \"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f\": rpc error: code = NotFound desc = could not find container \"07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f\": container with ID starting with 07c4742c3dc777757e1af6c33f218c1ea386d6d36df41a5ac9d7405dc4af028f not found: ID does not exist" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.162335 4711 scope.go:117] "RemoveContainer" containerID="e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.162656 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3"} err="failed to get container status \"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3\": rpc error: code = NotFound desc = could not find container \"e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3\": container with ID starting with e4f2c8e745a1c0e30062b7f724d45a4f4e9c45f1ce9a9715067552ebdddee5d3 not found: ID does not exist" Dec 
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.162961 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996"} err="failed to get container status \"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996\": rpc error: code = NotFound desc = could not find container \"fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996\": container with ID starting with fc0ef32746d66e22b436931a3efa4b41b0ed54300cba0860fcb7bdeefe13d996 not found: ID does not exist"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.162988 4711 scope.go:117] "RemoveContainer" containerID="3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.163290 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194"} err="failed to get container status \"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194\": rpc error: code = NotFound desc = could not find container \"3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194\": container with ID starting with 3b215264da79e49fb848674c60908045c397cba95ec4b49db58dba0cf89b5194 not found: ID does not exist"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.185502 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9db9755e-38c2-44d8-a728-424a4ff924a0" (UID: "9db9755e-38c2-44d8-a728-424a4ff924a0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.187709 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9db9755e-38c2-44d8-a728-424a4ff924a0" (UID: "9db9755e-38c2-44d8-a728-424a4ff924a0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.189876 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlc68\" (UniqueName: \"kubernetes.io/projected/9db9755e-38c2-44d8-a728-424a4ff924a0-kube-api-access-vlc68\") pod \"9db9755e-38c2-44d8-a728-424a4ff924a0\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.190158 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-dns-svc\") pod \"9db9755e-38c2-44d8-a728-424a4ff924a0\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.190317 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-config\") pod \"9db9755e-38c2-44d8-a728-424a4ff924a0\" (UID: \"9db9755e-38c2-44d8-a728-424a4ff924a0\") " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.190669 4711 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.190740 4711 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.190793 4711 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a5dc5cdb-751f-4754-af33-2985c29b98a3-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.190843 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.190892 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfghw\" (UniqueName: \"kubernetes.io/projected/a5dc5cdb-751f-4754-af33-2985c29b98a3-kube-api-access-tfghw\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.191032 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.191100 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.191172 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") on node \"crc\" " Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.191230 4711 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: 
\"kubernetes.io/secret/a5dc5cdb-751f-4754-af33-2985c29b98a3-web-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.191289 4711 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a5dc5cdb-751f-4754-af33-2985c29b98a3-config-out\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.195655 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9db9755e-38c2-44d8-a728-424a4ff924a0-kube-api-access-vlc68" (OuterVolumeSpecName: "kube-api-access-vlc68") pod "9db9755e-38c2-44d8-a728-424a4ff924a0" (UID: "9db9755e-38c2-44d8-a728-424a4ff924a0"). InnerVolumeSpecName "kube-api-access-vlc68". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.227826 4711 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.228019 4711 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03") on node "crc" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.246324 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-config" (OuterVolumeSpecName: "config") pod "9db9755e-38c2-44d8-a728-424a4ff924a0" (UID: "9db9755e-38c2-44d8-a728-424a4ff924a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.247945 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9db9755e-38c2-44d8-a728-424a4ff924a0" (UID: "9db9755e-38c2-44d8-a728-424a4ff924a0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.290968 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.292462 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.292497 4711 reconciler_common.go:293] "Volume detached for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.292508 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db9755e-38c2-44d8-a728-424a4ff924a0-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.292519 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlc68\" (UniqueName: \"kubernetes.io/projected/9db9755e-38c2-44d8-a728-424a4ff924a0-kube-api-access-vlc68\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.361441 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.363305 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.397991 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398445 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d52e95e-2ce4-4ea1-a9c3-50360150e675" containerName="mariadb-database-create" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398466 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d52e95e-2ce4-4ea1-a9c3-50360150e675" containerName="mariadb-database-create" Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398483 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4b2ca5-1164-478f-b5cd-b3a553a5100e" containerName="mariadb-account-create-update" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398491 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4b2ca5-1164-478f-b5cd-b3a553a5100e" containerName="mariadb-account-create-update" Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398505 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="init-config-reloader" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398513 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="init-config-reloader" Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398527 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500de779-f104-4141-87e8-95e80e23a870" containerName="mariadb-account-create-update" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398536 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="500de779-f104-4141-87e8-95e80e23a870" containerName="mariadb-account-create-update" Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398549 4711 
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398557 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bfdb3c4-d406-4d00-b405-2216fbe95943" containerName="mariadb-database-create"
Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398568 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="thanos-sidecar"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398577 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="thanos-sidecar"
Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398590 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db9755e-38c2-44d8-a728-424a4ff924a0" containerName="dnsmasq-dns"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398598 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db9755e-38c2-44d8-a728-424a4ff924a0" containerName="dnsmasq-dns"
Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398621 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="config-reloader"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398630 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="config-reloader"
Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398648 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="prometheus"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398655 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="prometheus"
Dec 05 12:29:05 crc kubenswrapper[4711]: E1205 12:29:05.398680 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db9755e-38c2-44d8-a728-424a4ff924a0" containerName="init"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398688 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db9755e-38c2-44d8-a728-424a4ff924a0" containerName="init"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398887 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="500de779-f104-4141-87e8-95e80e23a870" containerName="mariadb-account-create-update"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398902 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="prometheus"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398913 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d52e95e-2ce4-4ea1-a9c3-50360150e675" containerName="mariadb-database-create"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398923 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="thanos-sidecar"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398933 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4b2ca5-1164-478f-b5cd-b3a553a5100e" containerName="mariadb-account-create-update"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398941 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" containerName="config-reloader"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398948 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bfdb3c4-d406-4d00-b405-2216fbe95943" containerName="mariadb-database-create"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.398957 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9db9755e-38c2-44d8-a728-424a4ff924a0" containerName="dnsmasq-dns"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.401920 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.407263 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-jq6ps"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.407659 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.413257 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.413453 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.413589 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.413695 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.421557 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.435871 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597523 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597589 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597696 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-config\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597747 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597790 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597860 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw9r5\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-kube-api-access-rw9r5\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597901 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f11ac32e-01b9-4424-9a1b-008f367f316e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597949 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.597985 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.598015 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.598054 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f11ac32e-01b9-4424-9a1b-008f367f316e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.699635 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f11ac32e-01b9-4424-9a1b-008f367f316e-config-out\") pod 
\"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.699710 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.699765 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.699820 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-config\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.699843 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.699863 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.699910 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw9r5\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-kube-api-access-rw9r5\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.699950 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f11ac32e-01b9-4424-9a1b-008f367f316e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.700004 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.700063 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.700990 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f11ac32e-01b9-4424-9a1b-008f367f316e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.702436 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.702477 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f9bd9950147f59c5b738a088845da7f6ecb4f45a3918a37eadb6099082c1159/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.704510 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f11ac32e-01b9-4424-9a1b-008f367f316e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.704902 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.705266 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-config\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.706167 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.707383 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0"
"MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.707973 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.708273 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.708908 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.727360 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw9r5\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-kube-api-access-rw9r5\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:05 crc kubenswrapper[4711]: I1205 12:29:05.747196 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.007422 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55ffbfff47-b5jn7" event={"ID":"9db9755e-38c2-44d8-a728-424a4ff924a0","Type":"ContainerDied","Data":"fa2330e371540a6bf2b84370b6cd2124b33edf2efa97582aaa2c5763fffe346f"} Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.007482 4711 scope.go:117] "RemoveContainer" containerID="9f2edb50c520d49dffb48325213f290e472a0a63fad383e0c9a7ec20ddc5efba" Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.007527 4711 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.009298 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-np4sn" event={"ID":"2aadbf74-6733-453e-bdfc-f8b8bb7f60da","Type":"ContainerStarted","Data":"cf4104599a770557f3c48b31317f1032f4ef66758473ae6f8eb6e0381b5c84f8"}
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.028235 4711 scope.go:117] "RemoveContainer" containerID="f36e03dd25124e83e8662993d7d082575dc8b6cab22d822f5db544bebd18346b"
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.037051 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-np4sn" podStartSLOduration=2.621358223 podStartE2EDuration="10.03702992s" podCreationTimestamp="2025-12-05 12:28:56 +0000 UTC" firstStartedPulling="2025-12-05 12:28:57.278014453 +0000 UTC m=+1182.862336783" lastFinishedPulling="2025-12-05 12:29:04.69368615 +0000 UTC m=+1190.278008480" observedRunningTime="2025-12-05 12:29:06.031985446 +0000 UTC m=+1191.616307786" watchObservedRunningTime="2025-12-05 12:29:06.03702992 +0000 UTC m=+1191.621352250"
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.042832 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.056006 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55ffbfff47-b5jn7"]
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.061985 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55ffbfff47-b5jn7"]
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.168639 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-notifications-server-0"
Dec 05 12:29:06 crc kubenswrapper[4711]: W1205 12:29:06.608119 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf11ac32e_01b9_4424_9a1b_008f367f316e.slice/crio-931dd710de3ecf077e7d562e76c4152d1ff1776c57a0118ad8ac0a60213c52c8 WatchSource:0}: Error finding container 931dd710de3ecf077e7d562e76c4152d1ff1776c57a0118ad8ac0a60213c52c8: Status 404 returned error can't find the container with id 931dd710de3ecf077e7d562e76c4152d1ff1776c57a0118ad8ac0a60213c52c8
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.615414 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.694425 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9db9755e-38c2-44d8-a728-424a4ff924a0" path="/var/lib/kubelet/pods/9db9755e-38c2-44d8-a728-424a4ff924a0/volumes"
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.695343 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5dc5cdb-751f-4754-af33-2985c29b98a3" path="/var/lib/kubelet/pods/a5dc5cdb-751f-4754-af33-2985c29b98a3/volumes"
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.997291 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-ldxm8"]
Dec 05 12:29:06 crc kubenswrapper[4711]: I1205 12:29:06.998669 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-ldxm8"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.019364 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerStarted","Data":"931dd710de3ecf077e7d562e76c4152d1ff1776c57a0118ad8ac0a60213c52c8"}
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.022950 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-ldxm8"]
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.071591 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-h26mq"]
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.074228 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-h26mq"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.076915 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-t9qnb"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.077504 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.082957 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-h26mq"]
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.136223 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-6d77-account-create-update-gh4ds"]
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.137444 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-6d77-account-create-update-gh4ds"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.139464 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.151659 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-6d77-account-create-update-gh4ds"]
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.169282 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-db-sync-config-data\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.169669 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vkrj\" (UniqueName: \"kubernetes.io/projected/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-kube-api-access-7vkrj\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.170593 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-config-data\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.170635 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-combined-ca-bundle\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq"
\"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.170693 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-operator-scripts\") pod \"glance-db-create-ldxm8\" (UID: \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\") " pod="openstack/glance-db-create-ldxm8" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.170748 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s7sj\" (UniqueName: \"kubernetes.io/projected/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-kube-api-access-9s7sj\") pod \"glance-db-create-ldxm8\" (UID: \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\") " pod="openstack/glance-db-create-ldxm8" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.202880 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-tfvxw"] Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.204287 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-tfvxw" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.215065 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-tfvxw"] Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272066 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-operator-scripts\") pod \"glance-db-create-ldxm8\" (UID: \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\") " pod="openstack/glance-db-create-ldxm8" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272145 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s7sj\" (UniqueName: \"kubernetes.io/projected/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-kube-api-access-9s7sj\") pod \"glance-db-create-ldxm8\" (UID: \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\") " pod="openstack/glance-db-create-ldxm8" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272196 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-db-sync-config-data\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272226 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-operator-scripts\") pod \"glance-6d77-account-create-update-gh4ds\" (UID: \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\") " pod="openstack/glance-6d77-account-create-update-gh4ds" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272264 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vkrj\" (UniqueName: \"kubernetes.io/projected/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-kube-api-access-7vkrj\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272324 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-config-data\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272344 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-combined-ca-bundle\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272400 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfj4p\" (UniqueName: \"kubernetes.io/projected/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-kube-api-access-lfj4p\") pod \"glance-6d77-account-create-update-gh4ds\" (UID: \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\") " pod="openstack/glance-6d77-account-create-update-gh4ds" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.272880 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-operator-scripts\") pod \"glance-db-create-ldxm8\" (UID: \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\") " pod="openstack/glance-db-create-ldxm8" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.277532 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-db-sync-config-data\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.280902 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-config-data\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.291759 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-combined-ca-bundle\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.296810 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vkrj\" (UniqueName: \"kubernetes.io/projected/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-kube-api-access-7vkrj\") pod \"watcher-db-sync-h26mq\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.304702 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s7sj\" (UniqueName: \"kubernetes.io/projected/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-kube-api-access-9s7sj\") pod \"glance-db-create-ldxm8\" (UID: \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\") " pod="openstack/glance-db-create-ldxm8" Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.314446 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ca3b-account-create-update-mnv67"] Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.315615 4711 util.go:30] "No sandbox for pod can be 
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.318468 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-ldxm8"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.343949 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.353642 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ca3b-account-create-update-mnv67"]
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.374359 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mw7p\" (UniqueName: \"kubernetes.io/projected/847c5ee7-c53a-4173-82ad-313e68c42910-kube-api-access-9mw7p\") pod \"neutron-db-create-tfvxw\" (UID: \"847c5ee7-c53a-4173-82ad-313e68c42910\") " pod="openstack/neutron-db-create-tfvxw"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.374563 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfj4p\" (UniqueName: \"kubernetes.io/projected/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-kube-api-access-lfj4p\") pod \"glance-6d77-account-create-update-gh4ds\" (UID: \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\") " pod="openstack/glance-6d77-account-create-update-gh4ds"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.374649 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/847c5ee7-c53a-4173-82ad-313e68c42910-operator-scripts\") pod \"neutron-db-create-tfvxw\" (UID: \"847c5ee7-c53a-4173-82ad-313e68c42910\") " pod="openstack/neutron-db-create-tfvxw"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.374897 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-operator-scripts\") pod \"glance-6d77-account-create-update-gh4ds\" (UID: \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\") " pod="openstack/glance-6d77-account-create-update-gh4ds"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.377828 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-operator-scripts\") pod \"glance-6d77-account-create-update-gh4ds\" (UID: \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\") " pod="openstack/glance-6d77-account-create-update-gh4ds"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.413624 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-h26mq"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.422972 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfj4p\" (UniqueName: \"kubernetes.io/projected/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-kube-api-access-lfj4p\") pod \"glance-6d77-account-create-update-gh4ds\" (UID: \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\") " pod="openstack/glance-6d77-account-create-update-gh4ds"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.466222 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-6d77-account-create-update-gh4ds"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.476835 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rx8n\" (UniqueName: \"kubernetes.io/projected/4881af57-6b36-4c72-ba21-4ac8872288a1-kube-api-access-5rx8n\") pod \"neutron-ca3b-account-create-update-mnv67\" (UID: \"4881af57-6b36-4c72-ba21-4ac8872288a1\") " pod="openstack/neutron-ca3b-account-create-update-mnv67"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.476896 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mw7p\" (UniqueName: \"kubernetes.io/projected/847c5ee7-c53a-4173-82ad-313e68c42910-kube-api-access-9mw7p\") pod \"neutron-db-create-tfvxw\" (UID: \"847c5ee7-c53a-4173-82ad-313e68c42910\") " pod="openstack/neutron-db-create-tfvxw"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.476922 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4881af57-6b36-4c72-ba21-4ac8872288a1-operator-scripts\") pod \"neutron-ca3b-account-create-update-mnv67\" (UID: \"4881af57-6b36-4c72-ba21-4ac8872288a1\") " pod="openstack/neutron-ca3b-account-create-update-mnv67"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.476983 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/847c5ee7-c53a-4173-82ad-313e68c42910-operator-scripts\") pod \"neutron-db-create-tfvxw\" (UID: \"847c5ee7-c53a-4173-82ad-313e68c42910\") " pod="openstack/neutron-db-create-tfvxw"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.477742 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/847c5ee7-c53a-4173-82ad-313e68c42910-operator-scripts\") pod \"neutron-db-create-tfvxw\" (UID: \"847c5ee7-c53a-4173-82ad-313e68c42910\") " pod="openstack/neutron-db-create-tfvxw"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.513789 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mw7p\" (UniqueName: \"kubernetes.io/projected/847c5ee7-c53a-4173-82ad-313e68c42910-kube-api-access-9mw7p\") pod \"neutron-db-create-tfvxw\" (UID: \"847c5ee7-c53a-4173-82ad-313e68c42910\") " pod="openstack/neutron-db-create-tfvxw"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.534065 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-tfvxw"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.578837 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rx8n\" (UniqueName: \"kubernetes.io/projected/4881af57-6b36-4c72-ba21-4ac8872288a1-kube-api-access-5rx8n\") pod \"neutron-ca3b-account-create-update-mnv67\" (UID: \"4881af57-6b36-4c72-ba21-4ac8872288a1\") " pod="openstack/neutron-ca3b-account-create-update-mnv67"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.578886 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4881af57-6b36-4c72-ba21-4ac8872288a1-operator-scripts\") pod \"neutron-ca3b-account-create-update-mnv67\" (UID: \"4881af57-6b36-4c72-ba21-4ac8872288a1\") " pod="openstack/neutron-ca3b-account-create-update-mnv67"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.579676 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4881af57-6b36-4c72-ba21-4ac8872288a1-operator-scripts\") pod \"neutron-ca3b-account-create-update-mnv67\" (UID: \"4881af57-6b36-4c72-ba21-4ac8872288a1\") " pod="openstack/neutron-ca3b-account-create-update-mnv67"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.612121 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rx8n\" (UniqueName: \"kubernetes.io/projected/4881af57-6b36-4c72-ba21-4ac8872288a1-kube-api-access-5rx8n\") pod \"neutron-ca3b-account-create-update-mnv67\" (UID: \"4881af57-6b36-4c72-ba21-4ac8872288a1\") " pod="openstack/neutron-ca3b-account-create-update-mnv67"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.710762 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ca3b-account-create-update-mnv67"
Dec 05 12:29:07 crc kubenswrapper[4711]: I1205 12:29:07.896335 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-ldxm8"]
Dec 05 12:29:08 crc kubenswrapper[4711]: I1205 12:29:08.177805 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-6d77-account-create-update-gh4ds"]
Dec 05 12:29:08 crc kubenswrapper[4711]: I1205 12:29:08.189643 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-h26mq"]
Dec 05 12:29:08 crc kubenswrapper[4711]: I1205 12:29:08.263067 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-tfvxw"]
Dec 05 12:29:08 crc kubenswrapper[4711]: W1205 12:29:08.504212 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9caa91dd_f6d6_49a2_ba31_ab3ba2c4bd71.slice/crio-e68a72323191a08e66544a594fe996c7f015a5bef7d5753285a601a96776631d WatchSource:0}: Error finding container e68a72323191a08e66544a594fe996c7f015a5bef7d5753285a601a96776631d: Status 404 returned error can't find the container with id e68a72323191a08e66544a594fe996c7f015a5bef7d5753285a601a96776631d
Dec 05 12:29:08 crc kubenswrapper[4711]: W1205 12:29:08.507448 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd12a5b3d_46d8_41a5_95c8_1f95f238c8b1.slice/crio-d7529cf5f05c77d0c3bc06118323b441116b402f7d6c01ef900e2a46b5ec866e WatchSource:0}: Error finding container d7529cf5f05c77d0c3bc06118323b441116b402f7d6c01ef900e2a46b5ec866e: Status 404 returned error can't find the container with id d7529cf5f05c77d0c3bc06118323b441116b402f7d6c01ef900e2a46b5ec866e
Dec 05 12:29:09 crc kubenswrapper[4711]: I1205 12:29:09.038106 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ldxm8" event={"ID":"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1","Type":"ContainerStarted","Data":"d7529cf5f05c77d0c3bc06118323b441116b402f7d6c01ef900e2a46b5ec866e"}
Dec 05 12:29:09 crc kubenswrapper[4711]: I1205 12:29:09.040322 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-6d77-account-create-update-gh4ds" event={"ID":"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71","Type":"ContainerStarted","Data":"e68a72323191a08e66544a594fe996c7f015a5bef7d5753285a601a96776631d"}
Dec 05 12:29:09 crc kubenswrapper[4711]: I1205 12:29:09.045330 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-tfvxw" event={"ID":"847c5ee7-c53a-4173-82ad-313e68c42910","Type":"ContainerStarted","Data":"063a4d4a6cbc3e47e9a000426d6bb20f5f8f480f10d48b82473aa2e9f600113f"}
Dec 05 12:29:09 crc kubenswrapper[4711]: I1205 12:29:09.046904 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-h26mq" event={"ID":"c46ee7cc-06c9-41b0-b560-cc35c14dbf00","Type":"ContainerStarted","Data":"df3875247d6c204571bfec835db2ec9c6049fa419b23d427da8d578ad24c1743"}
Dec 05 12:29:09 crc kubenswrapper[4711]: I1205 12:29:09.084365 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ca3b-account-create-update-mnv67"]
Dec 05 12:29:09 crc kubenswrapper[4711]: W1205 12:29:09.237069 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4881af57_6b36_4c72_ba21_4ac8872288a1.slice/crio-092de46d98ea575afc53ee03c126c552292125d3a784e1e4cd94bc81f2eddfd7 WatchSource:0}: Error finding container 092de46d98ea575afc53ee03c126c552292125d3a784e1e4cd94bc81f2eddfd7: Status 404 returned error can't find the container with id 092de46d98ea575afc53ee03c126c552292125d3a784e1e4cd94bc81f2eddfd7
WatchSource:0}: Error finding container 092de46d98ea575afc53ee03c126c552292125d3a784e1e4cd94bc81f2eddfd7: Status 404 returned error can't find the container with id 092de46d98ea575afc53ee03c126c552292125d3a784e1e4cd94bc81f2eddfd7 Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.061169 4711 generic.go:334] "Generic (PLEG): container finished" podID="847c5ee7-c53a-4173-82ad-313e68c42910" containerID="d761a512e557dee327845990959c42d8c101a9816cfeac2cd0cb73475cc7388c" exitCode=0 Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.061257 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-tfvxw" event={"ID":"847c5ee7-c53a-4173-82ad-313e68c42910","Type":"ContainerDied","Data":"d761a512e557dee327845990959c42d8c101a9816cfeac2cd0cb73475cc7388c"} Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.066327 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ca3b-account-create-update-mnv67" event={"ID":"4881af57-6b36-4c72-ba21-4ac8872288a1","Type":"ContainerStarted","Data":"0ff4e599964d18574948483cdd8e7e316cc0778a0bd0f41429c60253f4c007b0"} Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.066373 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ca3b-account-create-update-mnv67" event={"ID":"4881af57-6b36-4c72-ba21-4ac8872288a1","Type":"ContainerStarted","Data":"092de46d98ea575afc53ee03c126c552292125d3a784e1e4cd94bc81f2eddfd7"} Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.068242 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerStarted","Data":"ab7da21819607da2997545f525ccce52c43e6f028ae6d55066abbe2eed2cce91"} Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.079867 4711 generic.go:334] "Generic (PLEG): container finished" podID="d12a5b3d-46d8-41a5-95c8-1f95f238c8b1" containerID="88635986f00781c8eda5d72e0bd8e78ab0a13288cfe76fe85254cabb9914d728" exitCode=0 Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.080036 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ldxm8" event={"ID":"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1","Type":"ContainerDied","Data":"88635986f00781c8eda5d72e0bd8e78ab0a13288cfe76fe85254cabb9914d728"} Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.083785 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-6d77-account-create-update-gh4ds" event={"ID":"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71","Type":"ContainerStarted","Data":"98a36a74dcc1ed4ce5d280a54d469e2fcc0c8792c70948dc5d786088eb92f28a"} Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.098332 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-ca3b-account-create-update-mnv67" podStartSLOduration=3.098317255 podStartE2EDuration="3.098317255s" podCreationTimestamp="2025-12-05 12:29:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:29:10.093041846 +0000 UTC m=+1195.677364186" watchObservedRunningTime="2025-12-05 12:29:10.098317255 +0000 UTC m=+1195.682639585" Dec 05 12:29:10 crc kubenswrapper[4711]: I1205 12:29:10.154781 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-6d77-account-create-update-gh4ds" podStartSLOduration=3.15476595 podStartE2EDuration="3.15476595s" podCreationTimestamp="2025-12-05 12:29:07 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:29:10.148009604 +0000 UTC m=+1195.732331934" watchObservedRunningTime="2025-12-05 12:29:10.15476595 +0000 UTC m=+1195.739088270" Dec 05 12:29:11 crc kubenswrapper[4711]: I1205 12:29:11.094197 4711 generic.go:334] "Generic (PLEG): container finished" podID="9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71" containerID="98a36a74dcc1ed4ce5d280a54d469e2fcc0c8792c70948dc5d786088eb92f28a" exitCode=0 Dec 05 12:29:11 crc kubenswrapper[4711]: I1205 12:29:11.094249 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-6d77-account-create-update-gh4ds" event={"ID":"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71","Type":"ContainerDied","Data":"98a36a74dcc1ed4ce5d280a54d469e2fcc0c8792c70948dc5d786088eb92f28a"} Dec 05 12:29:11 crc kubenswrapper[4711]: I1205 12:29:11.096473 4711 generic.go:334] "Generic (PLEG): container finished" podID="4881af57-6b36-4c72-ba21-4ac8872288a1" containerID="0ff4e599964d18574948483cdd8e7e316cc0778a0bd0f41429c60253f4c007b0" exitCode=0 Dec 05 12:29:11 crc kubenswrapper[4711]: I1205 12:29:11.096540 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ca3b-account-create-update-mnv67" event={"ID":"4881af57-6b36-4c72-ba21-4ac8872288a1","Type":"ContainerDied","Data":"0ff4e599964d18574948483cdd8e7e316cc0778a0bd0f41429c60253f4c007b0"} Dec 05 12:29:13 crc kubenswrapper[4711]: I1205 12:29:13.115186 4711 generic.go:334] "Generic (PLEG): container finished" podID="2aadbf74-6733-453e-bdfc-f8b8bb7f60da" containerID="cf4104599a770557f3c48b31317f1032f4ef66758473ae6f8eb6e0381b5c84f8" exitCode=0 Dec 05 12:29:13 crc kubenswrapper[4711]: I1205 12:29:13.115280 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-np4sn" event={"ID":"2aadbf74-6733-453e-bdfc-f8b8bb7f60da","Type":"ContainerDied","Data":"cf4104599a770557f3c48b31317f1032f4ef66758473ae6f8eb6e0381b5c84f8"} Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.146656 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-tfvxw" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.151794 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ca3b-account-create-update-mnv67" event={"ID":"4881af57-6b36-4c72-ba21-4ac8872288a1","Type":"ContainerDied","Data":"092de46d98ea575afc53ee03c126c552292125d3a784e1e4cd94bc81f2eddfd7"} Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.151857 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="092de46d98ea575afc53ee03c126c552292125d3a784e1e4cd94bc81f2eddfd7" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.153763 4711 generic.go:334] "Generic (PLEG): container finished" podID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerID="ab7da21819607da2997545f525ccce52c43e6f028ae6d55066abbe2eed2cce91" exitCode=0 Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.153820 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerDied","Data":"ab7da21819607da2997545f525ccce52c43e6f028ae6d55066abbe2eed2cce91"} Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.156869 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-ldxm8" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.158244 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ldxm8" event={"ID":"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1","Type":"ContainerDied","Data":"d7529cf5f05c77d0c3bc06118323b441116b402f7d6c01ef900e2a46b5ec866e"} Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.158274 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7529cf5f05c77d0c3bc06118323b441116b402f7d6c01ef900e2a46b5ec866e" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.167598 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-6d77-account-create-update-gh4ds" event={"ID":"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71","Type":"ContainerDied","Data":"e68a72323191a08e66544a594fe996c7f015a5bef7d5753285a601a96776631d"} Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.171787 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e68a72323191a08e66544a594fe996c7f015a5bef7d5753285a601a96776631d" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.172933 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-tfvxw" event={"ID":"847c5ee7-c53a-4173-82ad-313e68c42910","Type":"ContainerDied","Data":"063a4d4a6cbc3e47e9a000426d6bb20f5f8f480f10d48b82473aa2e9f600113f"} Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.172947 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-tfvxw" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.172961 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="063a4d4a6cbc3e47e9a000426d6bb20f5f8f480f10d48b82473aa2e9f600113f" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.175798 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-np4sn" event={"ID":"2aadbf74-6733-453e-bdfc-f8b8bb7f60da","Type":"ContainerDied","Data":"168e0026c4a2ebd6aa3517f04fbc897b3e6ca6cf766f3b345d68742924d24880"} Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.175844 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="168e0026c4a2ebd6aa3517f04fbc897b3e6ca6cf766f3b345d68742924d24880" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.252503 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mw7p\" (UniqueName: \"kubernetes.io/projected/847c5ee7-c53a-4173-82ad-313e68c42910-kube-api-access-9mw7p\") pod \"847c5ee7-c53a-4173-82ad-313e68c42910\" (UID: \"847c5ee7-c53a-4173-82ad-313e68c42910\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.252612 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9s7sj\" (UniqueName: \"kubernetes.io/projected/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-kube-api-access-9s7sj\") pod \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\" (UID: \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.252742 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-operator-scripts\") pod \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\" (UID: \"d12a5b3d-46d8-41a5-95c8-1f95f238c8b1\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.252806 4711 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/847c5ee7-c53a-4173-82ad-313e68c42910-operator-scripts\") pod \"847c5ee7-c53a-4173-82ad-313e68c42910\" (UID: \"847c5ee7-c53a-4173-82ad-313e68c42910\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.254877 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d12a5b3d-46d8-41a5-95c8-1f95f238c8b1" (UID: "d12a5b3d-46d8-41a5-95c8-1f95f238c8b1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.255314 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/847c5ee7-c53a-4173-82ad-313e68c42910-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "847c5ee7-c53a-4173-82ad-313e68c42910" (UID: "847c5ee7-c53a-4173-82ad-313e68c42910"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.256120 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ca3b-account-create-update-mnv67" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.259718 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-kube-api-access-9s7sj" (OuterVolumeSpecName: "kube-api-access-9s7sj") pod "d12a5b3d-46d8-41a5-95c8-1f95f238c8b1" (UID: "d12a5b3d-46d8-41a5-95c8-1f95f238c8b1"). InnerVolumeSpecName "kube-api-access-9s7sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.260635 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/847c5ee7-c53a-4173-82ad-313e68c42910-kube-api-access-9mw7p" (OuterVolumeSpecName: "kube-api-access-9mw7p") pod "847c5ee7-c53a-4173-82ad-313e68c42910" (UID: "847c5ee7-c53a-4173-82ad-313e68c42910"). InnerVolumeSpecName "kube-api-access-9mw7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.296703 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-6d77-account-create-update-gh4ds" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.317367 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-np4sn" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.355420 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfj4p\" (UniqueName: \"kubernetes.io/projected/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-kube-api-access-lfj4p\") pod \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\" (UID: \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.355468 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rx8n\" (UniqueName: \"kubernetes.io/projected/4881af57-6b36-4c72-ba21-4ac8872288a1-kube-api-access-5rx8n\") pod \"4881af57-6b36-4c72-ba21-4ac8872288a1\" (UID: \"4881af57-6b36-4c72-ba21-4ac8872288a1\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.355512 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-operator-scripts\") pod \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\" (UID: \"9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.355544 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4881af57-6b36-4c72-ba21-4ac8872288a1-operator-scripts\") pod \"4881af57-6b36-4c72-ba21-4ac8872288a1\" (UID: \"4881af57-6b36-4c72-ba21-4ac8872288a1\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.356039 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mw7p\" (UniqueName: \"kubernetes.io/projected/847c5ee7-c53a-4173-82ad-313e68c42910-kube-api-access-9mw7p\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.356352 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9s7sj\" (UniqueName: \"kubernetes.io/projected/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-kube-api-access-9s7sj\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.356366 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.356378 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/847c5ee7-c53a-4173-82ad-313e68c42910-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.356515 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4881af57-6b36-4c72-ba21-4ac8872288a1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4881af57-6b36-4c72-ba21-4ac8872288a1" (UID: "4881af57-6b36-4c72-ba21-4ac8872288a1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.361010 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71" (UID: "9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.362782 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-kube-api-access-lfj4p" (OuterVolumeSpecName: "kube-api-access-lfj4p") pod "9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71" (UID: "9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71"). InnerVolumeSpecName "kube-api-access-lfj4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.363373 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4881af57-6b36-4c72-ba21-4ac8872288a1-kube-api-access-5rx8n" (OuterVolumeSpecName: "kube-api-access-5rx8n") pod "4881af57-6b36-4c72-ba21-4ac8872288a1" (UID: "4881af57-6b36-4c72-ba21-4ac8872288a1"). InnerVolumeSpecName "kube-api-access-5rx8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.457632 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-combined-ca-bundle\") pod \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.457758 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxwvf\" (UniqueName: \"kubernetes.io/projected/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-kube-api-access-vxwvf\") pod \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.457853 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-config-data\") pod \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\" (UID: \"2aadbf74-6733-453e-bdfc-f8b8bb7f60da\") " Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.458239 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfj4p\" (UniqueName: \"kubernetes.io/projected/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-kube-api-access-lfj4p\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.458259 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rx8n\" (UniqueName: \"kubernetes.io/projected/4881af57-6b36-4c72-ba21-4ac8872288a1-kube-api-access-5rx8n\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.458270 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.458281 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4881af57-6b36-4c72-ba21-4ac8872288a1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.462685 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-kube-api-access-vxwvf" (OuterVolumeSpecName: "kube-api-access-vxwvf") pod "2aadbf74-6733-453e-bdfc-f8b8bb7f60da" (UID: "2aadbf74-6733-453e-bdfc-f8b8bb7f60da"). 
InnerVolumeSpecName "kube-api-access-vxwvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.493505 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2aadbf74-6733-453e-bdfc-f8b8bb7f60da" (UID: "2aadbf74-6733-453e-bdfc-f8b8bb7f60da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.532013 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-config-data" (OuterVolumeSpecName: "config-data") pod "2aadbf74-6733-453e-bdfc-f8b8bb7f60da" (UID: "2aadbf74-6733-453e-bdfc-f8b8bb7f60da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.559950 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.560226 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxwvf\" (UniqueName: \"kubernetes.io/projected/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-kube-api-access-vxwvf\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:16 crc kubenswrapper[4711]: I1205 12:29:16.560333 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadbf74-6733-453e-bdfc-f8b8bb7f60da-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.194230 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerStarted","Data":"64db569f013b01f98c7de191fc5cc3bcdd9cb12c08d29fafcb12d74fe786cd5c"} Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.197037 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ca3b-account-create-update-mnv67" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.198565 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-6d77-account-create-update-gh4ds" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.198688 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-h26mq" event={"ID":"c46ee7cc-06c9-41b0-b560-cc35c14dbf00","Type":"ContainerStarted","Data":"e4a2d27b45a182feaf0dd75f0135bed588d1ad8b2e82f9ab5336fe28965b68a9"} Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.198857 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-np4sn" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.199011 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-ldxm8" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.231822 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-h26mq" podStartSLOduration=2.761148909 podStartE2EDuration="10.231803164s" podCreationTimestamp="2025-12-05 12:29:07 +0000 UTC" firstStartedPulling="2025-12-05 12:29:08.518379935 +0000 UTC m=+1194.102702265" lastFinishedPulling="2025-12-05 12:29:15.98903418 +0000 UTC m=+1201.573356520" observedRunningTime="2025-12-05 12:29:17.220940357 +0000 UTC m=+1202.805262697" watchObservedRunningTime="2025-12-05 12:29:17.231803164 +0000 UTC m=+1202.816125494" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.647189 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b96779c97-9xp5t"] Dec 05 12:29:17 crc kubenswrapper[4711]: E1205 12:29:17.647657 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847c5ee7-c53a-4173-82ad-313e68c42910" containerName="mariadb-database-create" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.647676 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="847c5ee7-c53a-4173-82ad-313e68c42910" containerName="mariadb-database-create" Dec 05 12:29:17 crc kubenswrapper[4711]: E1205 12:29:17.647696 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aadbf74-6733-453e-bdfc-f8b8bb7f60da" containerName="keystone-db-sync" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.647702 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aadbf74-6733-453e-bdfc-f8b8bb7f60da" containerName="keystone-db-sync" Dec 05 12:29:17 crc kubenswrapper[4711]: E1205 12:29:17.647713 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4881af57-6b36-4c72-ba21-4ac8872288a1" containerName="mariadb-account-create-update" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.647727 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4881af57-6b36-4c72-ba21-4ac8872288a1" containerName="mariadb-account-create-update" Dec 05 12:29:17 crc kubenswrapper[4711]: E1205 12:29:17.647751 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71" containerName="mariadb-account-create-update" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.647759 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71" containerName="mariadb-account-create-update" Dec 05 12:29:17 crc kubenswrapper[4711]: E1205 12:29:17.647777 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12a5b3d-46d8-41a5-95c8-1f95f238c8b1" containerName="mariadb-database-create" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.647785 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12a5b3d-46d8-41a5-95c8-1f95f238c8b1" containerName="mariadb-database-create" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.647978 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2aadbf74-6733-453e-bdfc-f8b8bb7f60da" containerName="keystone-db-sync" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.648001 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4881af57-6b36-4c72-ba21-4ac8872288a1" containerName="mariadb-account-create-update" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.648013 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d12a5b3d-46d8-41a5-95c8-1f95f238c8b1" containerName="mariadb-database-create" Dec 
05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.648025 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71" containerName="mariadb-account-create-update" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.648037 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="847c5ee7-c53a-4173-82ad-313e68c42910" containerName="mariadb-database-create" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.649140 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.670782 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b96779c97-9xp5t"] Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.751094 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-2mdjb"] Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.752924 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.756333 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.756510 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-7sbtb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.757073 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.767699 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.767873 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.769903 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2mdjb"] Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.783184 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-svc\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.783226 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmksb\" (UniqueName: \"kubernetes.io/projected/81ab17d0-8820-4d4c-9024-8d2f51b6f158-kube-api-access-pmksb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.783276 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-sb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.783381 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-nb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.783427 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-config\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.783449 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-swift-storage-0\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886484 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-scripts\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886578 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-combined-ca-bundle\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886606 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-credential-keys\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886701 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-nb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886728 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-fernet-keys\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886751 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-config\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886771 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-swift-storage-0\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886860 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-config-data\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886906 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-svc\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886930 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmksb\" (UniqueName: \"kubernetes.io/projected/81ab17d0-8820-4d4c-9024-8d2f51b6f158-kube-api-access-pmksb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.886972 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cxw6\" (UniqueName: \"kubernetes.io/projected/d923834d-2dfd-4565-b2f8-958e48911810-kube-api-access-5cxw6\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.887008 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-sb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.888334 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-sb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.889002 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-nb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.889518 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-config\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.889705 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-svc\") 
pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.890091 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-swift-storage-0\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.917632 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b8d8fdfd5-5nhbq"] Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.919480 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.923611 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-2ktjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.930008 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmksb\" (UniqueName: \"kubernetes.io/projected/81ab17d0-8820-4d4c-9024-8d2f51b6f158-kube-api-access-pmksb\") pod \"dnsmasq-dns-5b96779c97-9xp5t\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") " pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.949575 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b8d8fdfd5-5nhbq"] Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.956432 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.965799 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.974938 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.989491 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-scripts\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.989548 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-combined-ca-bundle\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.989577 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-credential-keys\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.989607 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-fernet-keys\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " 
pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.989649 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-config-data\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.989692 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cxw6\" (UniqueName: \"kubernetes.io/projected/d923834d-2dfd-4565-b2f8-958e48911810-kube-api-access-5cxw6\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.990341 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-kwn6b"] Dec 05 12:29:17 crc kubenswrapper[4711]: I1205 12:29:17.999812 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-fernet-keys\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.000837 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.005304 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-config-data\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.005957 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.007047 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.007809 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-scripts\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.021924 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-credential-keys\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.022377 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-85sx7" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.022652 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.025170 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-combined-ca-bundle\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.047260 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cxw6\" (UniqueName: \"kubernetes.io/projected/d923834d-2dfd-4565-b2f8-958e48911810-kube-api-access-5cxw6\") pod \"keystone-bootstrap-2mdjb\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.089895 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kwn6b"] Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.091318 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-config-data\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.091380 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f955aa55-498a-457a-8bf3-9214f5751e47-horizon-secret-key\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.091440 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955aa55-498a-457a-8bf3-9214f5751e47-logs\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.091456 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-scripts\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.091477 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8277\" (UniqueName: \"kubernetes.io/projected/f955aa55-498a-457a-8bf3-9214f5751e47-kube-api-access-x8277\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.128596 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-j6vdf"] Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.130373 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.132133 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.135060 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.135613 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-j6gqm" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.138321 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.155845 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-j6vdf"] Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.175980 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-649d45d4d9-fm5xf"] Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.177588 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.188576 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-649d45d4d9-fm5xf"] Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197074 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442afda2-58a4-42e0-8793-08854bf5a587-logs\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197130 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-combined-ca-bundle\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197155 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-scripts\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197178 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5jpd\" (UniqueName: \"kubernetes.io/projected/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-kube-api-access-c5jpd\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197202 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-config-data\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197223 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-config\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197246 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-etc-machine-id\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197267 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7rsb\" (UniqueName: \"kubernetes.io/projected/442afda2-58a4-42e0-8793-08854bf5a587-kube-api-access-l7rsb\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197325 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-combined-ca-bundle\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197353 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-config-data\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197373 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442afda2-58a4-42e0-8793-08854bf5a587-horizon-secret-key\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197412 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f955aa55-498a-457a-8bf3-9214f5751e47-horizon-secret-key\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197432 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-scripts\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197518 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7spsq\" (UniqueName: \"kubernetes.io/projected/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-kube-api-access-7spsq\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197581 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955aa55-498a-457a-8bf3-9214f5751e47-logs\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197608 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-scripts\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197633 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8277\" (UniqueName: \"kubernetes.io/projected/f955aa55-498a-457a-8bf3-9214f5751e47-kube-api-access-x8277\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197657 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-config-data\") pod 
\"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.197687 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-db-sync-config-data\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.198039 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955aa55-498a-457a-8bf3-9214f5751e47-logs\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.198623 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-scripts\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.198715 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-config-data\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.249309 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b96779c97-9xp5t"] Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301214 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-scripts\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301295 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5jpd\" (UniqueName: \"kubernetes.io/projected/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-kube-api-access-c5jpd\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301336 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-config-data\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301412 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-config\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301454 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-etc-machine-id\") pod \"cinder-db-sync-j6vdf\" (UID: 
\"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301499 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7rsb\" (UniqueName: \"kubernetes.io/projected/442afda2-58a4-42e0-8793-08854bf5a587-kube-api-access-l7rsb\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301536 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-combined-ca-bundle\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301571 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442afda2-58a4-42e0-8793-08854bf5a587-horizon-secret-key\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301630 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-scripts\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301706 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7spsq\" (UniqueName: \"kubernetes.io/projected/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-kube-api-access-7spsq\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301763 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-config-data\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301807 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-db-sync-config-data\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301858 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442afda2-58a4-42e0-8793-08854bf5a587-logs\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.301914 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-combined-ca-bundle\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.310715 
4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-scripts\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.312702 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-config-data\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.320162 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-etc-machine-id\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.338450 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f955aa55-498a-457a-8bf3-9214f5751e47-horizon-secret-key\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.338753 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442afda2-58a4-42e0-8793-08854bf5a587-logs\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.357778 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-7sl8r"] Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.366247 4711 util.go:30] "No sandbox for pod can be found. 
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.370525 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-4fxgp"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.370705 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.370877 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.413181 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-7sl8r"]
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.432971 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8277\" (UniqueName: \"kubernetes.io/projected/f955aa55-498a-457a-8bf3-9214f5751e47-kube-api-access-x8277\") pod \"horizon-7b8d8fdfd5-5nhbq\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " pod="openstack/horizon-7b8d8fdfd5-5nhbq"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.435305 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-combined-ca-bundle\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.435991 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-scripts\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.437561 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442afda2-58a4-42e0-8793-08854bf5a587-horizon-secret-key\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.438725 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-db-sync-config-data\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.440864 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-config-data\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.444012 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-combined-ca-bundle\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.454306 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7rsb\" (UniqueName: \"kubernetes.io/projected/442afda2-58a4-42e0-8793-08854bf5a587-kube-api-access-l7rsb\") pod \"horizon-649d45d4d9-fm5xf\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " pod="openstack/horizon-649d45d4d9-fm5xf"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.456401 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7spsq\" (UniqueName: \"kubernetes.io/projected/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-kube-api-access-7spsq\") pod \"cinder-db-sync-j6vdf\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " pod="openstack/cinder-db-sync-j6vdf"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.493149 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-config\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.494006 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5jpd\" (UniqueName: \"kubernetes.io/projected/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-kube-api-access-c5jpd\") pod \"neutron-db-sync-kwn6b\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " pod="openstack/neutron-db-sync-kwn6b"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.510500 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7658cc989c-rdbv8"]
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.513092 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.523330 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.526368 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.536441 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.537535 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lds9g\" (UniqueName: \"kubernetes.io/projected/89800be3-c463-4e1e-b92a-abb613b5bf5e-kube-api-access-lds9g\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.537581 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-scripts\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.537638 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-combined-ca-bundle\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.537654 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89800be3-c463-4e1e-b92a-abb613b5bf5e-logs\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.537685 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-config-data\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.537847 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.548286 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-sm4bg"]
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.551629 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-sm4bg"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.559548 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7658cc989c-rdbv8"]
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.560054 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2ckjg"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.560345 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.568422 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.577345 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-sm4bg"]
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.640880 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-scripts\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.640970 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-config-data\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641043 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641102 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-svc\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641120 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-log-httpd\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641142 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-swift-storage-0\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8"
Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641175 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbh84\" (UniqueName: \"kubernetes.io/projected/1f496d6f-8883-493b-866b-117f6b7537e4-kube-api-access-qbh84\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8"
pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641262 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-sb\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641299 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lds9g\" (UniqueName: \"kubernetes.io/projected/89800be3-c463-4e1e-b92a-abb613b5bf5e-kube-api-access-lds9g\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641341 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-scripts\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641379 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-nb\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641433 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-config\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641594 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641654 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-combined-ca-bundle\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641679 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89800be3-c463-4e1e-b92a-abb613b5bf5e-logs\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641718 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-run-httpd\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 
12:29:18.641739 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-config-data\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.641758 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb8lx\" (UniqueName: \"kubernetes.io/projected/a059dfe5-97d3-412e-b70b-430bd3ab92b9-kube-api-access-hb8lx\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.642336 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89800be3-c463-4e1e-b92a-abb613b5bf5e-logs\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.647028 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-scripts\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.679198 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lds9g\" (UniqueName: \"kubernetes.io/projected/89800be3-c463-4e1e-b92a-abb613b5bf5e-kube-api-access-lds9g\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.696244 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.706812 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-combined-ca-bundle\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.722701 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.729672 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-config-data\") pod \"placement-db-sync-7sl8r\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.745770 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-sb\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.745866 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-nb\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.745891 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-config\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.745922 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.745978 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wpzd\" (UniqueName: \"kubernetes.io/projected/ddff9a4d-a020-4de4-a114-694bec9908f9-kube-api-access-2wpzd\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746000 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-combined-ca-bundle\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746056 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-run-httpd\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746083 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb8lx\" (UniqueName: \"kubernetes.io/projected/a059dfe5-97d3-412e-b70b-430bd3ab92b9-kube-api-access-hb8lx\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746107 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-scripts\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746128 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-config-data\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746174 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-db-sync-config-data\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746200 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746247 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-log-httpd\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746265 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-svc\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746288 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-swift-storage-0\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.746312 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbh84\" (UniqueName: \"kubernetes.io/projected/1f496d6f-8883-493b-866b-117f6b7537e4-kube-api-access-qbh84\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.748003 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-sb\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.748054 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-log-httpd\") pod \"ceilometer-0\" 
(UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.754234 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.754632 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-run-httpd\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.755205 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.756772 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-nb\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.758370 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-config-data\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.765136 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-config\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.768260 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-svc\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.768990 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.769254 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-swift-storage-0\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.770422 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbh84\" (UniqueName: \"kubernetes.io/projected/1f496d6f-8883-493b-866b-117f6b7537e4-kube-api-access-qbh84\") pod \"dnsmasq-dns-7658cc989c-rdbv8\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.775182 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.776599 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb8lx\" (UniqueName: \"kubernetes.io/projected/a059dfe5-97d3-412e-b70b-430bd3ab92b9-kube-api-access-hb8lx\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.787167 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-scripts\") pod \"ceilometer-0\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.795661 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-7sl8r" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.802273 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.823144 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.851741 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wpzd\" (UniqueName: \"kubernetes.io/projected/ddff9a4d-a020-4de4-a114-694bec9908f9-kube-api-access-2wpzd\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.851822 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-combined-ca-bundle\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.851950 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-db-sync-config-data\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.856371 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-combined-ca-bundle\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.857277 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-db-sync-config-data\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:18 crc kubenswrapper[4711]: I1205 12:29:18.873867 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wpzd\" (UniqueName: \"kubernetes.io/projected/ddff9a4d-a020-4de4-a114-694bec9908f9-kube-api-access-2wpzd\") pod \"barbican-db-sync-sm4bg\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.004912 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b96779c97-9xp5t"] Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.135856 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.146333 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2mdjb"] Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.299697 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kwn6b"] Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.305373 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" event={"ID":"81ab17d0-8820-4d4c-9024-8d2f51b6f158","Type":"ContainerStarted","Data":"f2b802b167f0187d05f626e60732717efc52987e0dbe959890414402f3e884b6"} Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.585493 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-649d45d4d9-fm5xf"] Dec 05 12:29:19 crc kubenswrapper[4711]: W1205 12:29:19.644407 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod442afda2_58a4_42e0_8793_08854bf5a587.slice/crio-ab16a2d81ac8010e9bb9364c2245e6525606d730f3804db5eb311b1407af27b5 WatchSource:0}: Error finding container ab16a2d81ac8010e9bb9364c2245e6525606d730f3804db5eb311b1407af27b5: Status 404 returned error can't find the container with id ab16a2d81ac8010e9bb9364c2245e6525606d730f3804db5eb311b1407af27b5 Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.822690 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b8d8fdfd5-5nhbq"] Dec 05 12:29:19 crc kubenswrapper[4711]: W1205 12:29:19.849472 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf955aa55_498a_457a_8bf3_9214f5751e47.slice/crio-cecdd825ba06bc7c432b983723b53c4a8b39a0f611b3c22c34559cbcd194efa1 WatchSource:0}: Error finding container cecdd825ba06bc7c432b983723b53c4a8b39a0f611b3c22c34559cbcd194efa1: Status 404 returned error can't find the container with id cecdd825ba06bc7c432b983723b53c4a8b39a0f611b3c22c34559cbcd194efa1 Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.857546 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-j6vdf"] Dec 05 12:29:19 crc kubenswrapper[4711]: W1205 12:29:19.873817 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda5d7bb4_71d2_458f_aabf_4cb2ed2f4661.slice/crio-9cac137ba8502b5df95f1fef354db36a05ded81f14fbf84694f875a792b5f807 WatchSource:0}: Error finding container 9cac137ba8502b5df95f1fef354db36a05ded81f14fbf84694f875a792b5f807: Status 404 returned error can't find the container with id 9cac137ba8502b5df95f1fef354db36a05ded81f14fbf84694f875a792b5f807 Dec 05 12:29:19 crc kubenswrapper[4711]: I1205 12:29:19.960586 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-sm4bg"] Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.006845 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7658cc989c-rdbv8"] Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.038813 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.068439 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-7sl8r"] Dec 05 12:29:20 crc kubenswrapper[4711]: W1205 12:29:20.069324 4711 manager.go:1169] Failed to process watch event {EventType:0 
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.386584 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2mdjb" event={"ID":"d923834d-2dfd-4565-b2f8-958e48911810","Type":"ContainerStarted","Data":"14e3a4dc74c78be10689e9531392db409015217d81330978947af90ce33ec14d"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.386676 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2mdjb" event={"ID":"d923834d-2dfd-4565-b2f8-958e48911810","Type":"ContainerStarted","Data":"d9421cb7813d01dbc6a56b40fb6ff35041c95e46353f286f045b4b8baedec812"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.405026 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8d8fdfd5-5nhbq" event={"ID":"f955aa55-498a-457a-8bf3-9214f5751e47","Type":"ContainerStarted","Data":"cecdd825ba06bc7c432b983723b53c4a8b39a0f611b3c22c34559cbcd194efa1"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.425296 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a059dfe5-97d3-412e-b70b-430bd3ab92b9","Type":"ContainerStarted","Data":"a1d0ce754db2dd80247b96db2dc906d145b888ed7726b02523cef4b3ef318f27"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.450689 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-j6vdf" event={"ID":"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661","Type":"ContainerStarted","Data":"9cac137ba8502b5df95f1fef354db36a05ded81f14fbf84694f875a792b5f807"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.467895 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kwn6b" event={"ID":"37cbdd67-d3c7-4318-9a73-82b9a4e249fa","Type":"ContainerStarted","Data":"103600391a03d143244c4b72b376da853989a793ca2842d0221c71aea76bff84"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.467974 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kwn6b" event={"ID":"37cbdd67-d3c7-4318-9a73-82b9a4e249fa","Type":"ContainerStarted","Data":"d1c577540bea510f5f338354e1b3f067a755d337312ffc62f349ee8fc79e9583"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.479607 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-649d45d4d9-fm5xf" event={"ID":"442afda2-58a4-42e0-8793-08854bf5a587","Type":"ContainerStarted","Data":"ab16a2d81ac8010e9bb9364c2245e6525606d730f3804db5eb311b1407af27b5"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.490114 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" event={"ID":"1f496d6f-8883-493b-866b-117f6b7537e4","Type":"ContainerStarted","Data":"52c3df0f2f8b4be4f732bafb25b134cd6ceaaf9ed61888a915be10930b7a2042"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.495624 4711 generic.go:334] "Generic (PLEG): container finished" podID="81ab17d0-8820-4d4c-9024-8d2f51b6f158" containerID="fe31320e6e5e4d58e3ad98bc8198cb0000061fb415f0f0b4dd5d9f606bc40966" exitCode=0
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.496190 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" event={"ID":"81ab17d0-8820-4d4c-9024-8d2f51b6f158","Type":"ContainerDied","Data":"fe31320e6e5e4d58e3ad98bc8198cb0000061fb415f0f0b4dd5d9f606bc40966"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.500012 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-2mdjb" podStartSLOduration=3.499989562 podStartE2EDuration="3.499989562s" podCreationTimestamp="2025-12-05 12:29:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:29:20.438810633 +0000 UTC m=+1206.023132963" watchObservedRunningTime="2025-12-05 12:29:20.499989562 +0000 UTC m=+1206.084311892"
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.509647 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-sm4bg" event={"ID":"ddff9a4d-a020-4de4-a114-694bec9908f9","Type":"ContainerStarted","Data":"33008daecc79f4777cf9131251960b5e280a934dd75dc5cae4d1370495e62487"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.522174 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7sl8r" event={"ID":"89800be3-c463-4e1e-b92a-abb613b5bf5e","Type":"ContainerStarted","Data":"6a63629e01f86610645e2e3802d89142801dcafdc0fdb08c3d0acbc34b473d08"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.553477 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerStarted","Data":"9a8e0e7b788fc223ff863a7440733725f70b6ad22d09f94970b37cfc05e13032"}
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.554357 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-kwn6b" podStartSLOduration=3.554328655 podStartE2EDuration="3.554328655s" podCreationTimestamp="2025-12-05 12:29:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:29:20.509844054 +0000 UTC m=+1206.094166384" watchObservedRunningTime="2025-12-05 12:29:20.554328655 +0000 UTC m=+1206.138650985"
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.659129 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.759170 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b8d8fdfd5-5nhbq"]
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.844477 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6788c646d7-4kxrg"]
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.853624 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.894002 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6788c646d7-4kxrg"]
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.983514 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-horizon-secret-key\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.983989 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-scripts\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.984036 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-logs\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.984076 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l85st\" (UniqueName: \"kubernetes.io/projected/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-kube-api-access-l85st\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:20 crc kubenswrapper[4711]: I1205 12:29:20.984101 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-config-data\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.039223 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b96779c97-9xp5t"
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085049 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-sb\") pod \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") "
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085123 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-nb\") pod \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") "
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085154 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmksb\" (UniqueName: \"kubernetes.io/projected/81ab17d0-8820-4d4c-9024-8d2f51b6f158-kube-api-access-pmksb\") pod \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") "
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085194 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-svc\") pod \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") "
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085274 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-config\") pod \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") "
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085304 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-swift-storage-0\") pod \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\" (UID: \"81ab17d0-8820-4d4c-9024-8d2f51b6f158\") "
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085675 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-horizon-secret-key\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085732 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-scripts\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085769 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-logs\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085804 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l85st\" (UniqueName: \"kubernetes.io/projected/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-kube-api-access-l85st\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg"
\"kubernetes.io/projected/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-kube-api-access-l85st\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.085826 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-config-data\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.087031 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-config-data\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.095802 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81ab17d0-8820-4d4c-9024-8d2f51b6f158-kube-api-access-pmksb" (OuterVolumeSpecName: "kube-api-access-pmksb") pod "81ab17d0-8820-4d4c-9024-8d2f51b6f158" (UID: "81ab17d0-8820-4d4c-9024-8d2f51b6f158"). InnerVolumeSpecName "kube-api-access-pmksb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.099133 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-scripts\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.099761 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-logs\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.105195 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-horizon-secret-key\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.121360 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "81ab17d0-8820-4d4c-9024-8d2f51b6f158" (UID: "81ab17d0-8820-4d4c-9024-8d2f51b6f158"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.125098 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "81ab17d0-8820-4d4c-9024-8d2f51b6f158" (UID: "81ab17d0-8820-4d4c-9024-8d2f51b6f158"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.125946 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l85st\" (UniqueName: \"kubernetes.io/projected/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-kube-api-access-l85st\") pod \"horizon-6788c646d7-4kxrg\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.143367 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "81ab17d0-8820-4d4c-9024-8d2f51b6f158" (UID: "81ab17d0-8820-4d4c-9024-8d2f51b6f158"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.145864 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-config" (OuterVolumeSpecName: "config") pod "81ab17d0-8820-4d4c-9024-8d2f51b6f158" (UID: "81ab17d0-8820-4d4c-9024-8d2f51b6f158"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.147477 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "81ab17d0-8820-4d4c-9024-8d2f51b6f158" (UID: "81ab17d0-8820-4d4c-9024-8d2f51b6f158"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.187698 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.187734 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.187749 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmksb\" (UniqueName: \"kubernetes.io/projected/81ab17d0-8820-4d4c-9024-8d2f51b6f158-kube-api-access-pmksb\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.187765 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.187793 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.187804 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/81ab17d0-8820-4d4c-9024-8d2f51b6f158-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.196347 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.570990 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" event={"ID":"81ab17d0-8820-4d4c-9024-8d2f51b6f158","Type":"ContainerDied","Data":"f2b802b167f0187d05f626e60732717efc52987e0dbe959890414402f3e884b6"} Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.571470 4711 scope.go:117] "RemoveContainer" containerID="fe31320e6e5e4d58e3ad98bc8198cb0000061fb415f0f0b4dd5d9f606bc40966" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.571276 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b96779c97-9xp5t" Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.640467 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b96779c97-9xp5t"] Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.644748 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b96779c97-9xp5t"] Dec 05 12:29:21 crc kubenswrapper[4711]: I1205 12:29:21.721482 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6788c646d7-4kxrg"] Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.413924 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-5gbrt"] Dec 05 12:29:22 crc kubenswrapper[4711]: E1205 12:29:22.414689 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81ab17d0-8820-4d4c-9024-8d2f51b6f158" containerName="init" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.414704 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="81ab17d0-8820-4d4c-9024-8d2f51b6f158" containerName="init" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.414964 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="81ab17d0-8820-4d4c-9024-8d2f51b6f158" containerName="init" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.424150 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-5gbrt"] Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.424277 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.428879 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.429166 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-c4pgg" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.524949 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-config-data\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.525138 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msfbn\" (UniqueName: \"kubernetes.io/projected/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-kube-api-access-msfbn\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.525302 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-db-sync-config-data\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.525710 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-combined-ca-bundle\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.584698 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerStarted","Data":"e8378c9832568aa42123949e45f327e71584d9dbf7ab03898265476e95813d8c"} Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.626891 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-db-sync-config-data\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.627001 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-combined-ca-bundle\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.627032 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-config-data\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.627072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-msfbn\" (UniqueName: \"kubernetes.io/projected/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-kube-api-access-msfbn\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.632794 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-db-sync-config-data\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.635051 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-config-data\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.636175 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-combined-ca-bundle\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.653072 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msfbn\" (UniqueName: \"kubernetes.io/projected/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-kube-api-access-msfbn\") pod \"glance-db-sync-5gbrt\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.696821 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81ab17d0-8820-4d4c-9024-8d2f51b6f158" path="/var/lib/kubelet/pods/81ab17d0-8820-4d4c-9024-8d2f51b6f158/volumes" Dec 05 12:29:22 crc kubenswrapper[4711]: I1205 12:29:22.748245 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5gbrt" Dec 05 12:29:23 crc kubenswrapper[4711]: I1205 12:29:23.611198 4711 generic.go:334] "Generic (PLEG): container finished" podID="1f496d6f-8883-493b-866b-117f6b7537e4" containerID="bae130e97d4ecb80284d9fbfbab2c1c20b252c7789a860b1ff177f6099a146d8" exitCode=0 Dec 05 12:29:23 crc kubenswrapper[4711]: I1205 12:29:23.613634 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" event={"ID":"1f496d6f-8883-493b-866b-117f6b7537e4","Type":"ContainerDied","Data":"bae130e97d4ecb80284d9fbfbab2c1c20b252c7789a860b1ff177f6099a146d8"} Dec 05 12:29:23 crc kubenswrapper[4711]: I1205 12:29:23.618488 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6788c646d7-4kxrg" event={"ID":"5f5e58bb-1049-4d61-b78a-a192ae4fe61a","Type":"ContainerStarted","Data":"09684f722133fd58172e4547768832e42a2a549d181b123cfa520ae9a3ce7e5a"} Dec 05 12:29:23 crc kubenswrapper[4711]: I1205 12:29:23.886935 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=18.886915522 podStartE2EDuration="18.886915522s" podCreationTimestamp="2025-12-05 12:29:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:29:23.674145845 +0000 UTC m=+1209.258468175" watchObservedRunningTime="2025-12-05 12:29:23.886915522 +0000 UTC m=+1209.471237852" Dec 05 12:29:23 crc kubenswrapper[4711]: I1205 12:29:23.899285 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-5gbrt"] Dec 05 12:29:23 crc kubenswrapper[4711]: W1205 12:29:23.918331 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e7e345d_f9d1_4c96_9da9_b960d54c7b5b.slice/crio-c55ff26c7715ca829e214e783d855c9f4e18cb2d3c4230a2d12080d271a1b102 WatchSource:0}: Error finding container c55ff26c7715ca829e214e783d855c9f4e18cb2d3c4230a2d12080d271a1b102: Status 404 returned error can't find the container with id c55ff26c7715ca829e214e783d855c9f4e18cb2d3c4230a2d12080d271a1b102 Dec 05 12:29:24 crc kubenswrapper[4711]: I1205 12:29:24.653051 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" event={"ID":"1f496d6f-8883-493b-866b-117f6b7537e4","Type":"ContainerStarted","Data":"154ef17e604562451a343074d4609420936fb40637503df88e5ccf3e0ea60f8f"} Dec 05 12:29:24 crc kubenswrapper[4711]: I1205 12:29:24.653412 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:24 crc kubenswrapper[4711]: I1205 12:29:24.655076 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5gbrt" event={"ID":"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b","Type":"ContainerStarted","Data":"c55ff26c7715ca829e214e783d855c9f4e18cb2d3c4230a2d12080d271a1b102"} Dec 05 12:29:24 crc kubenswrapper[4711]: I1205 12:29:24.688339 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" podStartSLOduration=6.688319354 podStartE2EDuration="6.688319354s" podCreationTimestamp="2025-12-05 12:29:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:29:24.680313948 +0000 UTC m=+1210.264636288" watchObservedRunningTime="2025-12-05 12:29:24.688319354 +0000 
UTC m=+1210.272641684" Dec 05 12:29:25 crc kubenswrapper[4711]: I1205 12:29:25.680969 4711 generic.go:334] "Generic (PLEG): container finished" podID="c46ee7cc-06c9-41b0-b560-cc35c14dbf00" containerID="e4a2d27b45a182feaf0dd75f0135bed588d1ad8b2e82f9ab5336fe28965b68a9" exitCode=0 Dec 05 12:29:25 crc kubenswrapper[4711]: I1205 12:29:25.681026 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-h26mq" event={"ID":"c46ee7cc-06c9-41b0-b560-cc35c14dbf00","Type":"ContainerDied","Data":"e4a2d27b45a182feaf0dd75f0135bed588d1ad8b2e82f9ab5336fe28965b68a9"} Dec 05 12:29:25 crc kubenswrapper[4711]: I1205 12:29:25.687299 4711 generic.go:334] "Generic (PLEG): container finished" podID="d923834d-2dfd-4565-b2f8-958e48911810" containerID="14e3a4dc74c78be10689e9531392db409015217d81330978947af90ce33ec14d" exitCode=0 Dec 05 12:29:25 crc kubenswrapper[4711]: I1205 12:29:25.687556 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2mdjb" event={"ID":"d923834d-2dfd-4565-b2f8-958e48911810","Type":"ContainerDied","Data":"14e3a4dc74c78be10689e9531392db409015217d81330978947af90ce33ec14d"} Dec 05 12:29:26 crc kubenswrapper[4711]: I1205 12:29:26.044279 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.237005 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-649d45d4d9-fm5xf"] Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.265322 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-744fd5f788-bs9bc"] Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.267324 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.271425 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.284709 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-744fd5f788-bs9bc"] Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.319105 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6788c646d7-4kxrg"] Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.345668 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6856c6c898-9lzvt"] Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.347144 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.360082 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-config-data\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.360245 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vhc6\" (UniqueName: \"kubernetes.io/projected/53844e89-65d7-4cbc-b375-dbfef360857b-kube-api-access-7vhc6\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.360293 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-combined-ca-bundle\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.360318 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53844e89-65d7-4cbc-b375-dbfef360857b-logs\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.360363 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-scripts\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.360442 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-tls-certs\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.360468 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-secret-key\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.377036 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6856c6c898-9lzvt"] Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462115 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-tls-certs\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462194 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-secret-key\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462254 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba5de8d4-e693-4431-aee3-0ba498f62e8e-config-data\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462290 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-config-data\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462316 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba5de8d4-e693-4431-aee3-0ba498f62e8e-logs\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462341 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-horizon-tls-certs\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462451 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr2fd\" (UniqueName: \"kubernetes.io/projected/ba5de8d4-e693-4431-aee3-0ba498f62e8e-kube-api-access-dr2fd\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462523 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vhc6\" (UniqueName: \"kubernetes.io/projected/53844e89-65d7-4cbc-b375-dbfef360857b-kube-api-access-7vhc6\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462549 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba5de8d4-e693-4431-aee3-0ba498f62e8e-scripts\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462585 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-combined-ca-bundle\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462604 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/53844e89-65d7-4cbc-b375-dbfef360857b-logs\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462652 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-scripts\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462678 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-combined-ca-bundle\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.462705 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-horizon-secret-key\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.463580 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53844e89-65d7-4cbc-b375-dbfef360857b-logs\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.463840 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-scripts\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.464361 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-config-data\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.469587 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-tls-certs\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.471324 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-secret-key\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.479957 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-combined-ca-bundle\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " 
pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.483765 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vhc6\" (UniqueName: \"kubernetes.io/projected/53844e89-65d7-4cbc-b375-dbfef360857b-kube-api-access-7vhc6\") pod \"horizon-744fd5f788-bs9bc\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.563856 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba5de8d4-e693-4431-aee3-0ba498f62e8e-config-data\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.563930 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba5de8d4-e693-4431-aee3-0ba498f62e8e-logs\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.563958 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-horizon-tls-certs\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.564046 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr2fd\" (UniqueName: \"kubernetes.io/projected/ba5de8d4-e693-4431-aee3-0ba498f62e8e-kube-api-access-dr2fd\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.564129 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba5de8d4-e693-4431-aee3-0ba498f62e8e-scripts\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.564205 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-combined-ca-bundle\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.564237 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-horizon-secret-key\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.565207 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba5de8d4-e693-4431-aee3-0ba498f62e8e-logs\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.565428 4711 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba5de8d4-e693-4431-aee3-0ba498f62e8e-scripts\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.566468 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba5de8d4-e693-4431-aee3-0ba498f62e8e-config-data\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.569442 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-horizon-tls-certs\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.569771 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-combined-ca-bundle\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.571858 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ba5de8d4-e693-4431-aee3-0ba498f62e8e-horizon-secret-key\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.583454 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr2fd\" (UniqueName: \"kubernetes.io/projected/ba5de8d4-e693-4431-aee3-0ba498f62e8e-kube-api-access-dr2fd\") pod \"horizon-6856c6c898-9lzvt\" (UID: \"ba5de8d4-e693-4431-aee3-0ba498f62e8e\") " pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.605566 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:29:27 crc kubenswrapper[4711]: I1205 12:29:27.675351 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:29:31 crc kubenswrapper[4711]: I1205 12:29:31.924525 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:31 crc kubenswrapper[4711]: I1205 12:29:31.955013 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-credential-keys\") pod \"d923834d-2dfd-4565-b2f8-958e48911810\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " Dec 05 12:29:31 crc kubenswrapper[4711]: I1205 12:29:31.955160 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-config-data\") pod \"d923834d-2dfd-4565-b2f8-958e48911810\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " Dec 05 12:29:31 crc kubenswrapper[4711]: I1205 12:29:31.955281 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-fernet-keys\") pod \"d923834d-2dfd-4565-b2f8-958e48911810\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " Dec 05 12:29:31 crc kubenswrapper[4711]: I1205 12:29:31.955908 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-combined-ca-bundle\") pod \"d923834d-2dfd-4565-b2f8-958e48911810\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " Dec 05 12:29:31 crc kubenswrapper[4711]: I1205 12:29:31.955977 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cxw6\" (UniqueName: \"kubernetes.io/projected/d923834d-2dfd-4565-b2f8-958e48911810-kube-api-access-5cxw6\") pod \"d923834d-2dfd-4565-b2f8-958e48911810\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " Dec 05 12:29:31 crc kubenswrapper[4711]: I1205 12:29:31.956003 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-scripts\") pod \"d923834d-2dfd-4565-b2f8-958e48911810\" (UID: \"d923834d-2dfd-4565-b2f8-958e48911810\") " Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.003981 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d923834d-2dfd-4565-b2f8-958e48911810-kube-api-access-5cxw6" (OuterVolumeSpecName: "kube-api-access-5cxw6") pod "d923834d-2dfd-4565-b2f8-958e48911810" (UID: "d923834d-2dfd-4565-b2f8-958e48911810"). InnerVolumeSpecName "kube-api-access-5cxw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.004004 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d923834d-2dfd-4565-b2f8-958e48911810" (UID: "d923834d-2dfd-4565-b2f8-958e48911810"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.004443 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-scripts" (OuterVolumeSpecName: "scripts") pod "d923834d-2dfd-4565-b2f8-958e48911810" (UID: "d923834d-2dfd-4565-b2f8-958e48911810"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.010621 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-config-data" (OuterVolumeSpecName: "config-data") pod "d923834d-2dfd-4565-b2f8-958e48911810" (UID: "d923834d-2dfd-4565-b2f8-958e48911810"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.012244 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d923834d-2dfd-4565-b2f8-958e48911810" (UID: "d923834d-2dfd-4565-b2f8-958e48911810"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.023613 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d923834d-2dfd-4565-b2f8-958e48911810" (UID: "d923834d-2dfd-4565-b2f8-958e48911810"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.057700 4711 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.057752 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.057765 4711 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.057775 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.057788 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cxw6\" (UniqueName: \"kubernetes.io/projected/d923834d-2dfd-4565-b2f8-958e48911810-kube-api-access-5cxw6\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.057803 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d923834d-2dfd-4565-b2f8-958e48911810-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.782549 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2mdjb" event={"ID":"d923834d-2dfd-4565-b2f8-958e48911810","Type":"ContainerDied","Data":"d9421cb7813d01dbc6a56b40fb6ff35041c95e46353f286f045b4b8baedec812"} Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.782855 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9421cb7813d01dbc6a56b40fb6ff35041c95e46353f286f045b4b8baedec812" Dec 05 12:29:32 crc kubenswrapper[4711]: I1205 12:29:32.782622 4711 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2mdjb" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.009343 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-2mdjb"] Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.020006 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-2mdjb"] Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.115954 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-2g9vv"] Dec 05 12:29:33 crc kubenswrapper[4711]: E1205 12:29:33.116472 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d923834d-2dfd-4565-b2f8-958e48911810" containerName="keystone-bootstrap" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.116498 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d923834d-2dfd-4565-b2f8-958e48911810" containerName="keystone-bootstrap" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.116767 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d923834d-2dfd-4565-b2f8-958e48911810" containerName="keystone-bootstrap" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.117588 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.120195 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.121766 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-7sbtb" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.121942 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.122128 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.126506 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.146430 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2g9vv"] Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.177596 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-scripts\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.177788 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-credential-keys\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.177960 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-config-data\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc 
kubenswrapper[4711]: I1205 12:29:33.178031 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-combined-ca-bundle\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.178164 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brndd\" (UniqueName: \"kubernetes.io/projected/1004a56f-792b-4dae-bee1-6be07e0d72dc-kube-api-access-brndd\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.178313 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-fernet-keys\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.279997 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-config-data\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.280064 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-combined-ca-bundle\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.280109 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brndd\" (UniqueName: \"kubernetes.io/projected/1004a56f-792b-4dae-bee1-6be07e0d72dc-kube-api-access-brndd\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.280148 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-fernet-keys\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.280209 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-scripts\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.280286 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-credential-keys\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.285001 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-scripts\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.285068 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-credential-keys\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.286260 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-combined-ca-bundle\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.286618 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-fernet-keys\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.286841 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-config-data\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.300064 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brndd\" (UniqueName: \"kubernetes.io/projected/1004a56f-792b-4dae-bee1-6be07e0d72dc-kube-api-access-brndd\") pod \"keystone-bootstrap-2g9vv\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.445021 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.812521 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.872920 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-859dfd4ddf-jlv5q"] Dec 05 12:29:33 crc kubenswrapper[4711]: I1205 12:29:33.873157 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" containerID="cri-o://62ba5a1faa76c1fe3b40d4beaa6c236d0f5717efe388d3f1ae5ccef8b89cc838" gracePeriod=10 Dec 05 12:29:34 crc kubenswrapper[4711]: I1205 12:29:34.452236 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4711]: I1205 12:29:34.701368 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d923834d-2dfd-4565-b2f8-958e48911810" path="/var/lib/kubelet/pods/d923834d-2dfd-4565-b2f8-958e48911810/volumes" Dec 05 12:29:34 crc kubenswrapper[4711]: I1205 12:29:34.800509 4711 generic.go:334] "Generic (PLEG): container finished" podID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerID="62ba5a1faa76c1fe3b40d4beaa6c236d0f5717efe388d3f1ae5ccef8b89cc838" exitCode=0 Dec 05 12:29:34 crc kubenswrapper[4711]: I1205 12:29:34.800561 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" event={"ID":"cc0d288d-3af3-4af4-9461-a70f2e14d5da","Type":"ContainerDied","Data":"62ba5a1faa76c1fe3b40d4beaa6c236d0f5717efe388d3f1ae5ccef8b89cc838"} Dec 05 12:29:36 crc kubenswrapper[4711]: I1205 12:29:36.043734 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:36 crc kubenswrapper[4711]: I1205 12:29:36.050599 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:36 crc kubenswrapper[4711]: E1205 12:29:36.497017 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-placement-api:watcher_latest" Dec 05 12:29:36 crc kubenswrapper[4711]: E1205 12:29:36.497279 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-placement-api:watcher_latest" Dec 05 12:29:36 crc kubenswrapper[4711]: E1205 12:29:36.497478 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:38.102.83.20:5001/podified-master-centos10/openstack-placement-api:watcher_latest,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lds9g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-7sl8r_openstack(89800be3-c463-4e1e-b92a-abb613b5bf5e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:29:36 crc kubenswrapper[4711]: E1205 12:29:36.498739 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-7sl8r" podUID="89800be3-c463-4e1e-b92a-abb613b5bf5e" Dec 05 12:29:36 crc kubenswrapper[4711]: E1205 12:29:36.821745 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-placement-api:watcher_latest\\\"\"" pod="openstack/placement-db-sync-7sl8r" podUID="89800be3-c463-4e1e-b92a-abb613b5bf5e" Dec 05 12:29:36 crc kubenswrapper[4711]: I1205 12:29:36.823926 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 12:29:44 crc kubenswrapper[4711]: I1205 12:29:44.453536 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.080832 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:49 crc kubenswrapper[4711]: E1205 12:29:49.087754 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-glance-api:watcher_latest" Dec 05 12:29:49 crc kubenswrapper[4711]: E1205 12:29:49.087970 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-glance-api:watcher_latest" Dec 05 12:29:49 crc kubenswrapper[4711]: E1205 12:29:49.088124 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:38.102.83.20:5001/podified-master-centos10/openstack-glance-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-msfbn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-5gbrt_openstack(5e7e345d-f9d1-4c96-9da9-b960d54c7b5b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:29:49 crc kubenswrapper[4711]: E1205 12:29:49.089337 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-5gbrt" podUID="5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.177257 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vkrj\" (UniqueName: 
\"kubernetes.io/projected/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-kube-api-access-7vkrj\") pod \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.177415 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-db-sync-config-data\") pod \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.177530 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-combined-ca-bundle\") pod \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.177564 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-config-data\") pod \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\" (UID: \"c46ee7cc-06c9-41b0-b560-cc35c14dbf00\") " Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.183569 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c46ee7cc-06c9-41b0-b560-cc35c14dbf00" (UID: "c46ee7cc-06c9-41b0-b560-cc35c14dbf00"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.187027 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-kube-api-access-7vkrj" (OuterVolumeSpecName: "kube-api-access-7vkrj") pod "c46ee7cc-06c9-41b0-b560-cc35c14dbf00" (UID: "c46ee7cc-06c9-41b0-b560-cc35c14dbf00"). InnerVolumeSpecName "kube-api-access-7vkrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.217345 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c46ee7cc-06c9-41b0-b560-cc35c14dbf00" (UID: "c46ee7cc-06c9-41b0-b560-cc35c14dbf00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.237128 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-config-data" (OuterVolumeSpecName: "config-data") pod "c46ee7cc-06c9-41b0-b560-cc35c14dbf00" (UID: "c46ee7cc-06c9-41b0-b560-cc35c14dbf00"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.280163 4711 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.280207 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.280216 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.280225 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vkrj\" (UniqueName: \"kubernetes.io/projected/c46ee7cc-06c9-41b0-b560-cc35c14dbf00-kube-api-access-7vkrj\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.454349 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.454771 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.925835 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-h26mq" Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.934762 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-h26mq" event={"ID":"c46ee7cc-06c9-41b0-b560-cc35c14dbf00","Type":"ContainerDied","Data":"df3875247d6c204571bfec835db2ec9c6049fa419b23d427da8d578ad24c1743"} Dec 05 12:29:49 crc kubenswrapper[4711]: I1205 12:29:49.934827 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df3875247d6c204571bfec835db2ec9c6049fa419b23d427da8d578ad24c1743" Dec 05 12:29:49 crc kubenswrapper[4711]: E1205 12:29:49.934919 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-glance-api:watcher_latest\\\"\"" pod="openstack/glance-db-sync-5gbrt" podUID="5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.386183 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:29:50 crc kubenswrapper[4711]: E1205 12:29:50.395975 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c46ee7cc-06c9-41b0-b560-cc35c14dbf00" containerName="watcher-db-sync" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.396537 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c46ee7cc-06c9-41b0-b560-cc35c14dbf00" containerName="watcher-db-sync" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.397135 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c46ee7cc-06c9-41b0-b560-cc35c14dbf00" containerName="watcher-db-sync" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.435247 4711 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.449667 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.463205 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-t9qnb" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.463376 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.464276 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.464412 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.468118 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.483548 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.517151 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.518352 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.522993 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526359 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjvpl\" (UniqueName: \"kubernetes.io/projected/655d9acd-df17-43e8-92b8-4f175cdda6b4-kube-api-access-fjvpl\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526415 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655d9acd-df17-43e8-92b8-4f175cdda6b4-logs\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526458 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-logs\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526498 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526516 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-combined-ca-bundle\") pod 
\"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526773 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-config-data\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526844 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526871 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrrt2\" (UniqueName: \"kubernetes.io/projected/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-kube-api-access-rrrt2\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.526910 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-config-data\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.547514 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629066 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629129 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrrt2\" (UniqueName: \"kubernetes.io/projected/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-kube-api-access-rrrt2\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629158 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-config-data\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629192 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-logs\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629230 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjvpl\" (UniqueName: \"kubernetes.io/projected/655d9acd-df17-43e8-92b8-4f175cdda6b4-kube-api-access-fjvpl\") pod 
\"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629276 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655d9acd-df17-43e8-92b8-4f175cdda6b4-logs\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629296 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629337 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-logs\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629355 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjnwp\" (UniqueName: \"kubernetes.io/projected/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-kube-api-access-fjnwp\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629375 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-config-data\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629423 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629447 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629492 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.629526 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-config-data\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: 
I1205 12:29:50.631305 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655d9acd-df17-43e8-92b8-4f175cdda6b4-logs\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.631478 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-logs\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.634556 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.634822 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.635040 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-config-data\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.636717 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.638957 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-config-data\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.648819 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrrt2\" (UniqueName: \"kubernetes.io/projected/4e6cdb19-ee98-47d7-9b19-9edfb9fe3907-kube-api-access-rrrt2\") pod \"watcher-applier-0\" (UID: \"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907\") " pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.649952 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjvpl\" (UniqueName: \"kubernetes.io/projected/655d9acd-df17-43e8-92b8-4f175cdda6b4-kube-api-access-fjvpl\") pod \"watcher-api-0\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.731940 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc 
kubenswrapper[4711]: I1205 12:29:50.732061 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjnwp\" (UniqueName: \"kubernetes.io/projected/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-kube-api-access-fjnwp\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.732119 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-config-data\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.732253 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.732430 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-logs\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.733055 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-logs\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.735574 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.736985 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-config-data\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.737144 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.748825 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjnwp\" (UniqueName: \"kubernetes.io/projected/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-kube-api-access-fjnwp\") pod \"watcher-decision-engine-0\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.787218 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.797041 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 12:29:50 crc kubenswrapper[4711]: I1205 12:29:50.849319 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:29:54 crc kubenswrapper[4711]: I1205 12:29:54.455508 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 05 12:29:57 crc kubenswrapper[4711]: I1205 12:29:57.995594 4711 generic.go:334] "Generic (PLEG): container finished" podID="37cbdd67-d3c7-4318-9a73-82b9a4e249fa" containerID="103600391a03d143244c4b72b376da853989a793ca2842d0221c71aea76bff84" exitCode=0 Dec 05 12:29:57 crc kubenswrapper[4711]: I1205 12:29:57.995638 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kwn6b" event={"ID":"37cbdd67-d3c7-4318-9a73-82b9a4e249fa","Type":"ContainerDied","Data":"103600391a03d143244c4b72b376da853989a793ca2842d0221c71aea76bff84"} Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.162331 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.300558 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-sb\") pod \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.300648 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-nb\") pod \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.300776 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-config\") pod \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.300807 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-svc\") pod \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.300853 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-swift-storage-0\") pod \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\" (UID: \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.300894 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpxqt\" (UniqueName: \"kubernetes.io/projected/cc0d288d-3af3-4af4-9461-a70f2e14d5da-kube-api-access-dpxqt\") pod \"cc0d288d-3af3-4af4-9461-a70f2e14d5da\" (UID: 
\"cc0d288d-3af3-4af4-9461-a70f2e14d5da\") " Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.305815 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc0d288d-3af3-4af4-9461-a70f2e14d5da-kube-api-access-dpxqt" (OuterVolumeSpecName: "kube-api-access-dpxqt") pod "cc0d288d-3af3-4af4-9461-a70f2e14d5da" (UID: "cc0d288d-3af3-4af4-9461-a70f2e14d5da"). InnerVolumeSpecName "kube-api-access-dpxqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.345193 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cc0d288d-3af3-4af4-9461-a70f2e14d5da" (UID: "cc0d288d-3af3-4af4-9461-a70f2e14d5da"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.351641 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cc0d288d-3af3-4af4-9461-a70f2e14d5da" (UID: "cc0d288d-3af3-4af4-9461-a70f2e14d5da"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.356217 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-config" (OuterVolumeSpecName: "config") pod "cc0d288d-3af3-4af4-9461-a70f2e14d5da" (UID: "cc0d288d-3af3-4af4-9461-a70f2e14d5da"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.361162 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cc0d288d-3af3-4af4-9461-a70f2e14d5da" (UID: "cc0d288d-3af3-4af4-9461-a70f2e14d5da"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.373268 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cc0d288d-3af3-4af4-9461-a70f2e14d5da" (UID: "cc0d288d-3af3-4af4-9461-a70f2e14d5da"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.405445 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.405488 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.405502 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.405516 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.405529 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpxqt\" (UniqueName: \"kubernetes.io/projected/cc0d288d-3af3-4af4-9461-a70f2e14d5da-kube-api-access-dpxqt\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:58 crc kubenswrapper[4711]: I1205 12:29:58.405542 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc0d288d-3af3-4af4-9461-a70f2e14d5da-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:58 crc kubenswrapper[4711]: E1205 12:29:58.904874 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc0d288d_3af3_4af4_9461_a70f2e14d5da.slice\": RecentStats: unable to find data in memory cache]" Dec 05 12:29:59 crc kubenswrapper[4711]: I1205 12:29:59.005306 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" Dec 05 12:29:59 crc kubenswrapper[4711]: I1205 12:29:59.005297 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" event={"ID":"cc0d288d-3af3-4af4-9461-a70f2e14d5da","Type":"ContainerDied","Data":"7f96e36a5a462871a97f87d52912048afc808bc71e48997644c4a5c478a3304b"} Dec 05 12:29:59 crc kubenswrapper[4711]: I1205 12:29:59.006852 4711 scope.go:117] "RemoveContainer" containerID="62ba5a1faa76c1fe3b40d4beaa6c236d0f5717efe388d3f1ae5ccef8b89cc838" Dec 05 12:29:59 crc kubenswrapper[4711]: I1205 12:29:59.041368 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-859dfd4ddf-jlv5q"] Dec 05 12:29:59 crc kubenswrapper[4711]: I1205 12:29:59.054125 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-859dfd4ddf-jlv5q"] Dec 05 12:29:59 crc kubenswrapper[4711]: I1205 12:29:59.456155 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-859dfd4ddf-jlv5q" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.141812 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh"] Dec 05 12:30:00 crc kubenswrapper[4711]: E1205 12:30:00.142183 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="init" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.142194 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="init" Dec 05 12:30:00 crc kubenswrapper[4711]: E1205 12:30:00.142206 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.142213 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.142418 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" containerName="dnsmasq-dns" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.143063 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.152320 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh"] Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.153740 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.153753 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.241893 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-secret-volume\") pod \"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.241984 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-config-volume\") pod \"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.242046 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl8pr\" (UniqueName: \"kubernetes.io/projected/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-kube-api-access-jl8pr\") pod \"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.343728 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-secret-volume\") pod \"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.343821 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-config-volume\") pod \"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.343892 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl8pr\" (UniqueName: \"kubernetes.io/projected/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-kube-api-access-jl8pr\") pod \"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.345040 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-config-volume\") pod 
\"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.350413 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-secret-volume\") pod \"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.362865 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl8pr\" (UniqueName: \"kubernetes.io/projected/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-kube-api-access-jl8pr\") pod \"collect-profiles-29415630-8t5bh\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.470607 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:00 crc kubenswrapper[4711]: I1205 12:30:00.697217 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc0d288d-3af3-4af4-9461-a70f2e14d5da" path="/var/lib/kubelet/pods/cc0d288d-3af3-4af4-9461-a70f2e14d5da/volumes" Dec 05 12:30:00 crc kubenswrapper[4711]: E1205 12:30:00.755100 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-barbican-api:watcher_latest" Dec 05 12:30:00 crc kubenswrapper[4711]: E1205 12:30:00.755177 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-barbican-api:watcher_latest" Dec 05 12:30:00 crc kubenswrapper[4711]: E1205 12:30:00.755547 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:38.102.83.20:5001/podified-master-centos10/openstack-barbican-api:watcher_latest,Command:[/bin/bash],Args:[-c barbican-manage db 
Dec 05 12:30:00 crc kubenswrapper[4711]: E1205 12:30:00.755547 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:38.102.83.20:5001/podified-master-centos10/openstack-barbican-api:watcher_latest,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2wpzd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-sm4bg_openstack(ddff9a4d-a020-4de4-a114-694bec9908f9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:30:00 crc kubenswrapper[4711]: E1205 12:30:00.757165 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-sm4bg" podUID="ddff9a4d-a020-4de4-a114-694bec9908f9"
Dec 05 12:30:01 crc kubenswrapper[4711]: E1205 12:30:01.024904 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-barbican-api:watcher_latest\\\"\"" pod="openstack/barbican-db-sync-sm4bg" podUID="ddff9a4d-a020-4de4-a114-694bec9908f9"
Dec 05 12:30:01 crc kubenswrapper[4711]: E1205 12:30:01.105084 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-ceilometer-central:watcher_latest"
Dec 05 12:30:01 crc kubenswrapper[4711]: E1205 12:30:01.105147 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-ceilometer-central:watcher_latest"
Dec 05 12:30:01 crc kubenswrapper[4711]: E1205 12:30:01.105295 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:38.102.83.20:5001/podified-master-centos10/openstack-ceilometer-central:watcher_latest,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n67fh56fh66h547h644h68dh99h556h5b5hd6h596h58bh76h665hbdh55chffhc5h547h56ch4h654h647h6dh657h8fh588h5d6h55fhfdh5ddhbbq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hb8lx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(a059dfe5-97d3-412e-b70b-430bd3ab92b9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:30:02 crc kubenswrapper[4711]: E1205 12:30:02.188021 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Dec 05 12:30:02 crc kubenswrapper[4711]: E1205 12:30:02.188440 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.20:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Dec 05 12:30:02 crc kubenswrapper[4711]: E1205 12:30:02.188596 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:38.102.83.20:5001/podified-master-centos10/openstack-cinder-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7spsq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-j6vdf_openstack(da5d7bb4-71d2-458f-aabf-4cb2ed2f4661): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:30:02 crc kubenswrapper[4711]: E1205 12:30:02.190515 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-j6vdf" podUID="da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.352697 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.381537 4711 scope.go:117] "RemoveContainer" containerID="56b6c394afe68f3127778fd5764c7f667593cce7b0046b4f0a25d9874d8f6497" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.488158 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-config\") pod \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.488554 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5jpd\" (UniqueName: \"kubernetes.io/projected/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-kube-api-access-c5jpd\") pod \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.488814 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-combined-ca-bundle\") pod \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\" (UID: \"37cbdd67-d3c7-4318-9a73-82b9a4e249fa\") " Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.492810 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-kube-api-access-c5jpd" (OuterVolumeSpecName: "kube-api-access-c5jpd") pod "37cbdd67-d3c7-4318-9a73-82b9a4e249fa" (UID: "37cbdd67-d3c7-4318-9a73-82b9a4e249fa"). InnerVolumeSpecName "kube-api-access-c5jpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.541922 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37cbdd67-d3c7-4318-9a73-82b9a4e249fa" (UID: "37cbdd67-d3c7-4318-9a73-82b9a4e249fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.542070 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-config" (OuterVolumeSpecName: "config") pod "37cbdd67-d3c7-4318-9a73-82b9a4e249fa" (UID: "37cbdd67-d3c7-4318-9a73-82b9a4e249fa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.593922 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.593963 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.593976 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5jpd\" (UniqueName: \"kubernetes.io/projected/37cbdd67-d3c7-4318-9a73-82b9a4e249fa-kube-api-access-c5jpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:02 crc kubenswrapper[4711]: I1205 12:30:02.944096 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-744fd5f788-bs9bc"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.056320 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kwn6b" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.056846 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kwn6b" event={"ID":"37cbdd67-d3c7-4318-9a73-82b9a4e249fa","Type":"ContainerDied","Data":"d1c577540bea510f5f338354e1b3f067a755d337312ffc62f349ee8fc79e9583"} Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.056874 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1c577540bea510f5f338354e1b3f067a755d337312ffc62f349ee8fc79e9583" Dec 05 12:30:03 crc kubenswrapper[4711]: E1205 12:30:03.063796 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.20:5001/podified-master-centos10/openstack-cinder-api:watcher_latest\\\"\"" pod="openstack/cinder-db-sync-j6vdf" podUID="da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.103446 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.125705 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2g9vv"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.132628 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6856c6c898-9lzvt"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.140719 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.244482 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.252173 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.639228 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7745988997-jb8k5"] Dec 05 12:30:03 crc kubenswrapper[4711]: E1205 12:30:03.640069 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37cbdd67-d3c7-4318-9a73-82b9a4e249fa" containerName="neutron-db-sync" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.640085 4711 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="37cbdd67-d3c7-4318-9a73-82b9a4e249fa" containerName="neutron-db-sync" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.640310 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="37cbdd67-d3c7-4318-9a73-82b9a4e249fa" containerName="neutron-db-sync" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.645881 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.675601 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7745988997-jb8k5"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.725789 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-swift-storage-0\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.726363 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-sb\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.726422 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-nb\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.726458 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-config\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.726571 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t58rw\" (UniqueName: \"kubernetes.io/projected/7684da94-d794-4cbd-accc-224b649a1c14-kube-api-access-t58rw\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.726633 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-svc\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.847924 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-svc\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.848362 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-swift-storage-0\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.849015 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-sb\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.849140 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-nb\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.849201 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-config\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.850144 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-swift-storage-0\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.850462 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-svc\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.851254 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-nb\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.851279 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-sb\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.852275 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-config\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.859852 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-555488f586-gdlch"] Dec 05 12:30:03 crc 
kubenswrapper[4711]: I1205 12:30:03.864819 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t58rw\" (UniqueName: \"kubernetes.io/projected/7684da94-d794-4cbd-accc-224b649a1c14-kube-api-access-t58rw\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.881553 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.884538 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.884857 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.886268 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.886617 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-85sx7" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.912989 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t58rw\" (UniqueName: \"kubernetes.io/projected/7684da94-d794-4cbd-accc-224b649a1c14-kube-api-access-t58rw\") pod \"dnsmasq-dns-7745988997-jb8k5\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.939928 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-555488f586-gdlch"] Dec 05 12:30:03 crc kubenswrapper[4711]: I1205 12:30:03.997946 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.068110 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-config\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.068235 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94zrd\" (UniqueName: \"kubernetes.io/projected/76c7c0ee-1c4f-409f-9706-663ff94af792-kube-api-access-94zrd\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.068277 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-ovndb-tls-certs\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.068330 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-httpd-config\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.068353 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-combined-ca-bundle\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.084380 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerStarted","Data":"2139433d5e48ecf1a25ac6e999fc349ca3d877e06d625eb1ff26540a348ef6bb"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.086153 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"655d9acd-df17-43e8-92b8-4f175cdda6b4","Type":"ContainerStarted","Data":"fa74880fb1ef458e72b1140faa2e00b4645951d8168f82cd6962954cf031d49f"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.087361 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907","Type":"ContainerStarted","Data":"112daaa5b0ff266be0dc737ab8bb73533f07daee903e2d352df8c79d542a6941"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.094682 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2g9vv" event={"ID":"1004a56f-792b-4dae-bee1-6be07e0d72dc","Type":"ContainerStarted","Data":"e02971927ebe158052e0b2a1c4d32c44299daa0cd8d1d8ea39f32102b533bd6b"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.100060 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6856c6c898-9lzvt" 
event={"ID":"ba5de8d4-e693-4431-aee3-0ba498f62e8e","Type":"ContainerStarted","Data":"6b415bdaba255aaf1100f4c54a1eb29b7a1bfced8c033c500dfa49f4855d2bb5"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.125348 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8d8fdfd5-5nhbq" event={"ID":"f955aa55-498a-457a-8bf3-9214f5751e47","Type":"ContainerStarted","Data":"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.173228 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94zrd\" (UniqueName: \"kubernetes.io/projected/76c7c0ee-1c4f-409f-9706-663ff94af792-kube-api-access-94zrd\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.173293 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-ovndb-tls-certs\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.173343 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-httpd-config\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.173366 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-combined-ca-bundle\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.173414 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-config\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.177296 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6788c646d7-4kxrg" event={"ID":"5f5e58bb-1049-4d61-b78a-a192ae4fe61a","Type":"ContainerStarted","Data":"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.185934 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-httpd-config\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.203952 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" event={"ID":"e88e06c8-ba12-4a4a-b88d-726fc1c0925a","Type":"ContainerStarted","Data":"c62cd7d97c3e587b20b3d505836d894a1fc23d5e507c9c6a92d5e22b8accd976"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.210142 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-744fd5f788-bs9bc" 
event={"ID":"53844e89-65d7-4cbc-b375-dbfef360857b","Type":"ContainerStarted","Data":"1a19ff5754cad182a10b1c0214a8f9acedb0a7693526158772580b30bf8d9223"} Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.239207 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-ovndb-tls-certs\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.242346 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-combined-ca-bundle\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.272803 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94zrd\" (UniqueName: \"kubernetes.io/projected/76c7c0ee-1c4f-409f-9706-663ff94af792-kube-api-access-94zrd\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.274224 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-config\") pod \"neutron-555488f586-gdlch\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.570055 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:04 crc kubenswrapper[4711]: I1205 12:30:04.910977 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7745988997-jb8k5"] Dec 05 12:30:04 crc kubenswrapper[4711]: W1205 12:30:04.937967 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7684da94_d794_4cbd_accc_224b649a1c14.slice/crio-a459c8aed21d730a927c545b2f3f19c5e923c600487b11a1c5c66af0ff9f0f8d WatchSource:0}: Error finding container a459c8aed21d730a927c545b2f3f19c5e923c600487b11a1c5c66af0ff9f0f8d: Status 404 returned error can't find the container with id a459c8aed21d730a927c545b2f3f19c5e923c600487b11a1c5c66af0ff9f0f8d Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.233179 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" event={"ID":"e88e06c8-ba12-4a4a-b88d-726fc1c0925a","Type":"ContainerDied","Data":"f1490ed30eb8061924ece8013970050987cb50549740e3dfd27b62ef6df1b09d"} Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.232965 4711 generic.go:334] "Generic (PLEG): container finished" podID="e88e06c8-ba12-4a4a-b88d-726fc1c0925a" containerID="f1490ed30eb8061924ece8013970050987cb50549740e3dfd27b62ef6df1b09d" exitCode=0 Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.244254 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2g9vv" event={"ID":"1004a56f-792b-4dae-bee1-6be07e0d72dc","Type":"ContainerStarted","Data":"5c128be0ec38b635947cacf350dd5e9459e6140109ccd5dfd0313d35a484e8a9"} Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.246402 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-7745988997-jb8k5" event={"ID":"7684da94-d794-4cbd-accc-224b649a1c14","Type":"ContainerStarted","Data":"a459c8aed21d730a927c545b2f3f19c5e923c600487b11a1c5c66af0ff9f0f8d"} Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.248758 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-649d45d4d9-fm5xf" event={"ID":"442afda2-58a4-42e0-8793-08854bf5a587","Type":"ContainerStarted","Data":"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a"} Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.260688 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7sl8r" event={"ID":"89800be3-c463-4e1e-b92a-abb613b5bf5e","Type":"ContainerStarted","Data":"74140c15b9c50e00a2dc82b3d4b1074606e814fe48324d3bb6a5dc41c13ddcf4"} Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.267243 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"655d9acd-df17-43e8-92b8-4f175cdda6b4","Type":"ContainerStarted","Data":"8401a42463d7e3b9de96ab55c5591ddf4fa85e49ebee1db2e3b54dd66608ea9d"} Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.288086 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-2g9vv" podStartSLOduration=32.288068689 podStartE2EDuration="32.288068689s" podCreationTimestamp="2025-12-05 12:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:05.278724139 +0000 UTC m=+1250.863046469" watchObservedRunningTime="2025-12-05 12:30:05.288068689 +0000 UTC m=+1250.872391009" Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.342437 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-7sl8r" podStartSLOduration=5.064439335 podStartE2EDuration="47.342412961s" podCreationTimestamp="2025-12-05 12:29:18 +0000 UTC" firstStartedPulling="2025-12-05 12:29:20.158329205 +0000 UTC m=+1205.742651535" lastFinishedPulling="2025-12-05 12:30:02.436302831 +0000 UTC m=+1248.020625161" observedRunningTime="2025-12-05 12:30:05.332231671 +0000 UTC m=+1250.916553991" watchObservedRunningTime="2025-12-05 12:30:05.342412961 +0000 UTC m=+1250.926735301" Dec 05 12:30:05 crc kubenswrapper[4711]: I1205 12:30:05.448663 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-555488f586-gdlch"] Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.301847 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8d8fdfd5-5nhbq" event={"ID":"f955aa55-498a-457a-8bf3-9214f5751e47","Type":"ContainerStarted","Data":"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.301980 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b8d8fdfd5-5nhbq" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" containerName="horizon-log" containerID="cri-o://c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba" gracePeriod=30 Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.302368 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b8d8fdfd5-5nhbq" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" containerName="horizon" containerID="cri-o://f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be" gracePeriod=30 Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.310016 4711 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a059dfe5-97d3-412e-b70b-430bd3ab92b9","Type":"ContainerStarted","Data":"454c5c2c8669dfd6e10ce7d7bb01e3db1fc9bd864e5c7e05ed5b825fbe3bed20"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.321060 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6788c646d7-4kxrg" event={"ID":"5f5e58bb-1049-4d61-b78a-a192ae4fe61a","Type":"ContainerStarted","Data":"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.321213 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6788c646d7-4kxrg" podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerName="horizon-log" containerID="cri-o://79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf" gracePeriod=30 Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.321297 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6788c646d7-4kxrg" podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerName="horizon" containerID="cri-o://126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004" gracePeriod=30 Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.335120 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-744fd5f788-bs9bc" event={"ID":"53844e89-65d7-4cbc-b375-dbfef360857b","Type":"ContainerStarted","Data":"a8b2276036c2d8555cc86ef88f650eefe07004dceb19cb735a3726cc375238af"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.352767 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6788c646d7-4kxrg" podStartSLOduration=7.369447317 podStartE2EDuration="46.352752785s" podCreationTimestamp="2025-12-05 12:29:20 +0000 UTC" firstStartedPulling="2025-12-05 12:29:23.261651541 +0000 UTC m=+1208.845973871" lastFinishedPulling="2025-12-05 12:30:02.244957009 +0000 UTC m=+1247.829279339" observedRunningTime="2025-12-05 12:30:06.351999516 +0000 UTC m=+1251.936321856" watchObservedRunningTime="2025-12-05 12:30:06.352752785 +0000 UTC m=+1251.937075115" Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.354412 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b8d8fdfd5-5nhbq" podStartSLOduration=8.13025684 podStartE2EDuration="49.354407286s" podCreationTimestamp="2025-12-05 12:29:17 +0000 UTC" firstStartedPulling="2025-12-05 12:29:19.864203282 +0000 UTC m=+1205.448525612" lastFinishedPulling="2025-12-05 12:30:01.088353728 +0000 UTC m=+1246.672676058" observedRunningTime="2025-12-05 12:30:06.327103477 +0000 UTC m=+1251.911425817" watchObservedRunningTime="2025-12-05 12:30:06.354407286 +0000 UTC m=+1251.938729606" Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.355076 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5gbrt" event={"ID":"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b","Type":"ContainerStarted","Data":"6930b6c7d4ac8c11c92e437f563aa3ea5fa4d8ffbb9a75e0aab2327b4cdd0aa2"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.377418 4711 generic.go:334] "Generic (PLEG): container finished" podID="7684da94-d794-4cbd-accc-224b649a1c14" containerID="54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb" exitCode=0 Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.377782 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7745988997-jb8k5" 
event={"ID":"7684da94-d794-4cbd-accc-224b649a1c14","Type":"ContainerDied","Data":"54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.381960 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-5gbrt" podStartSLOduration=4.937930936 podStartE2EDuration="44.381943781s" podCreationTimestamp="2025-12-05 12:29:22 +0000 UTC" firstStartedPulling="2025-12-05 12:29:23.92105557 +0000 UTC m=+1209.505377900" lastFinishedPulling="2025-12-05 12:30:03.365068425 +0000 UTC m=+1248.949390745" observedRunningTime="2025-12-05 12:30:06.375638106 +0000 UTC m=+1251.959960436" watchObservedRunningTime="2025-12-05 12:30:06.381943781 +0000 UTC m=+1251.966266111" Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.385309 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-649d45d4d9-fm5xf" event={"ID":"442afda2-58a4-42e0-8793-08854bf5a587","Type":"ContainerStarted","Data":"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.385525 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-649d45d4d9-fm5xf" podUID="442afda2-58a4-42e0-8793-08854bf5a587" containerName="horizon-log" containerID="cri-o://18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a" gracePeriod=30 Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.385675 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-649d45d4d9-fm5xf" podUID="442afda2-58a4-42e0-8793-08854bf5a587" containerName="horizon" containerID="cri-o://54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249" gracePeriod=30 Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.390844 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"655d9acd-df17-43e8-92b8-4f175cdda6b4","Type":"ContainerStarted","Data":"a6f51066ef8a4a296da4c8ab1e2110be7b51674e93abe5812d8dd2a9a63c0376"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.392472 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.398427 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6856c6c898-9lzvt" event={"ID":"ba5de8d4-e693-4431-aee3-0ba498f62e8e","Type":"ContainerStarted","Data":"6979c3b8135796d4ff5e43809f0d5a752f1ad16f5e6ca98524ded86786680a5e"} Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.427638 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-649d45d4d9-fm5xf" podStartSLOduration=5.840987655 podStartE2EDuration="48.42761547s" podCreationTimestamp="2025-12-05 12:29:18 +0000 UTC" firstStartedPulling="2025-12-05 12:29:19.657147505 +0000 UTC m=+1205.241469835" lastFinishedPulling="2025-12-05 12:30:02.24377532 +0000 UTC m=+1247.828097650" observedRunningTime="2025-12-05 12:30:06.425610322 +0000 UTC m=+1252.009932652" watchObservedRunningTime="2025-12-05 12:30:06.42761547 +0000 UTC m=+1252.011937800" Dec 05 12:30:06 crc kubenswrapper[4711]: I1205 12:30:06.463847 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=16.463829479 podStartE2EDuration="16.463829479s" podCreationTimestamp="2025-12-05 12:29:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:06.458461877 +0000 UTC m=+1252.042784237" watchObservedRunningTime="2025-12-05 12:30:06.463829479 +0000 UTC m=+1252.048151809" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.301538 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6b64c669fc-xv225"] Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.303798 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.307338 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.307578 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.310758 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b64c669fc-xv225"] Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.457525 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-public-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.457597 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-internal-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.457639 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-config\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.457886 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-httpd-config\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.458107 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-ovndb-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.458363 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bkjl\" (UniqueName: \"kubernetes.io/projected/4fa030c6-14a5-4924-a293-cfa089e98f54-kube-api-access-8bkjl\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.458693 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-combined-ca-bundle\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.560257 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bkjl\" (UniqueName: \"kubernetes.io/projected/4fa030c6-14a5-4924-a293-cfa089e98f54-kube-api-access-8bkjl\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.560729 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-combined-ca-bundle\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.560823 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-public-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.560918 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-internal-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.561007 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-config\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.561137 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-httpd-config\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.561222 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-ovndb-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.567944 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-httpd-config\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.569699 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-ovndb-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.570482 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-combined-ca-bundle\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.572746 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-config\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.574555 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-internal-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.575297 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa030c6-14a5-4924-a293-cfa089e98f54-public-tls-certs\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.594847 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bkjl\" (UniqueName: \"kubernetes.io/projected/4fa030c6-14a5-4924-a293-cfa089e98f54-kube-api-access-8bkjl\") pod \"neutron-6b64c669fc-xv225\" (UID: \"4fa030c6-14a5-4924-a293-cfa089e98f54\") " pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:07 crc kubenswrapper[4711]: I1205 12:30:07.639425 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.334156 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.451162 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" event={"ID":"e88e06c8-ba12-4a4a-b88d-726fc1c0925a","Type":"ContainerDied","Data":"c62cd7d97c3e587b20b3d505836d894a1fc23d5e507c9c6a92d5e22b8accd976"} Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.451233 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c62cd7d97c3e587b20b3d505836d894a1fc23d5e507c9c6a92d5e22b8accd976" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.451243 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.456649 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.457675 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-555488f586-gdlch" event={"ID":"76c7c0ee-1c4f-409f-9706-663ff94af792","Type":"ContainerStarted","Data":"9741b05d9c6690a7e57ec0feb5083fb72c18d1f5882002e89fb192fb0ef6a0af"} Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.476975 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-config-volume\") pod \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.477151 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jl8pr\" (UniqueName: \"kubernetes.io/projected/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-kube-api-access-jl8pr\") pod \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.477215 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-secret-volume\") pod \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\" (UID: \"e88e06c8-ba12-4a4a-b88d-726fc1c0925a\") " Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.477660 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-config-volume" (OuterVolumeSpecName: "config-volume") pod "e88e06c8-ba12-4a4a-b88d-726fc1c0925a" (UID: "e88e06c8-ba12-4a4a-b88d-726fc1c0925a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.477783 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.482545 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-kube-api-access-jl8pr" (OuterVolumeSpecName: "kube-api-access-jl8pr") pod "e88e06c8-ba12-4a4a-b88d-726fc1c0925a" (UID: "e88e06c8-ba12-4a4a-b88d-726fc1c0925a"). InnerVolumeSpecName "kube-api-access-jl8pr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.484470 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e88e06c8-ba12-4a4a-b88d-726fc1c0925a" (UID: "e88e06c8-ba12-4a4a-b88d-726fc1c0925a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.579680 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jl8pr\" (UniqueName: \"kubernetes.io/projected/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-kube-api-access-jl8pr\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.579713 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e88e06c8-ba12-4a4a-b88d-726fc1c0925a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.716154 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:30:08 crc kubenswrapper[4711]: I1205 12:30:08.769402 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:30:09 crc kubenswrapper[4711]: I1205 12:30:09.082358 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b64c669fc-xv225"] Dec 05 12:30:09 crc kubenswrapper[4711]: I1205 12:30:09.470138 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7745988997-jb8k5" event={"ID":"7684da94-d794-4cbd-accc-224b649a1c14","Type":"ContainerStarted","Data":"a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d"} Dec 05 12:30:09 crc kubenswrapper[4711]: I1205 12:30:09.489280 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-555488f586-gdlch" event={"ID":"76c7c0ee-1c4f-409f-9706-663ff94af792","Type":"ContainerStarted","Data":"f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2"} Dec 05 12:30:09 crc kubenswrapper[4711]: I1205 12:30:09.498122 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b64c669fc-xv225" event={"ID":"4fa030c6-14a5-4924-a293-cfa089e98f54","Type":"ContainerStarted","Data":"26c816eafacaaf84f36fb61cad19f9a95ebe088984b86200c59586b1d6634327"} Dec 05 12:30:09 crc kubenswrapper[4711]: I1205 12:30:09.505637 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"4e6cdb19-ee98-47d7-9b19-9edfb9fe3907","Type":"ContainerStarted","Data":"c5331e548dba235576e7920e0249cbb5d769d39fb5d063efdf6134107e7cc62e"} Dec 05 12:30:10 crc kubenswrapper[4711]: I1205 12:30:10.788123 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 12:30:10 crc kubenswrapper[4711]: I1205 12:30:10.788511 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 05 12:30:10 crc kubenswrapper[4711]: I1205 12:30:10.788619 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:30:11 crc kubenswrapper[4711]: I1205 12:30:11.197197 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:30:11 crc kubenswrapper[4711]: I1205 12:30:11.434045 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.159:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:30:11 crc kubenswrapper[4711]: I1205 12:30:11.830867 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/watcher-api-0" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" 
containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:30:12 crc kubenswrapper[4711]: I1205 12:30:12.567884 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerStarted","Data":"2335bde3a171c64d57f7ee19e1c6d22c1e1865ea34f968b4e4aac49bc4b8a90e"} Dec 05 12:30:12 crc kubenswrapper[4711]: I1205 12:30:12.569954 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-744fd5f788-bs9bc" event={"ID":"53844e89-65d7-4cbc-b375-dbfef360857b","Type":"ContainerStarted","Data":"d8992c9aa5235fde93e9e62efbeb7b298a387f545314aa2553dcff5475ec8e93"} Dec 05 12:30:12 crc kubenswrapper[4711]: I1205 12:30:12.933494 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 12:30:13 crc kubenswrapper[4711]: I1205 12:30:13.580093 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6856c6c898-9lzvt" event={"ID":"ba5de8d4-e693-4431-aee3-0ba498f62e8e","Type":"ContainerStarted","Data":"950d38f05231a1c91caf701f90a7f8602c80793672899fcb257db2e594789676"} Dec 05 12:30:13 crc kubenswrapper[4711]: I1205 12:30:13.601617 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6856c6c898-9lzvt" podStartSLOduration=46.601603272 podStartE2EDuration="46.601603272s" podCreationTimestamp="2025-12-05 12:29:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:13.59784098 +0000 UTC m=+1259.182163310" watchObservedRunningTime="2025-12-05 12:30:13.601603272 +0000 UTC m=+1259.185925602" Dec 05 12:30:13 crc kubenswrapper[4711]: I1205 12:30:13.630396 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-744fd5f788-bs9bc" podStartSLOduration=46.630361328 podStartE2EDuration="46.630361328s" podCreationTimestamp="2025-12-05 12:29:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:13.625346655 +0000 UTC m=+1259.209668985" watchObservedRunningTime="2025-12-05 12:30:13.630361328 +0000 UTC m=+1259.214683658" Dec 05 12:30:13 crc kubenswrapper[4711]: I1205 12:30:13.657945 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=18.621020124 podStartE2EDuration="23.657925773s" podCreationTimestamp="2025-12-05 12:29:50 +0000 UTC" firstStartedPulling="2025-12-05 12:30:03.38486381 +0000 UTC m=+1248.969186140" lastFinishedPulling="2025-12-05 12:30:08.421769459 +0000 UTC m=+1254.006091789" observedRunningTime="2025-12-05 12:30:13.65450161 +0000 UTC m=+1259.238823960" watchObservedRunningTime="2025-12-05 12:30:13.657925773 +0000 UTC m=+1259.242248103" Dec 05 12:30:14 crc kubenswrapper[4711]: I1205 12:30:14.591318 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b64c669fc-xv225" event={"ID":"4fa030c6-14a5-4924-a293-cfa089e98f54","Type":"ContainerStarted","Data":"c84b986432bbc83a9797b644c5cd67582d08b4744fcdc36e07aae5319216e9a0"} Dec 05 12:30:14 crc kubenswrapper[4711]: I1205 12:30:14.592426 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:14 crc kubenswrapper[4711]: 
I1205 12:30:14.598612 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:30:14 crc kubenswrapper[4711]: I1205 12:30:14.641738 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7745988997-jb8k5" podStartSLOduration=11.641718926 podStartE2EDuration="11.641718926s" podCreationTimestamp="2025-12-05 12:30:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:14.632854019 +0000 UTC m=+1260.217176349" watchObservedRunningTime="2025-12-05 12:30:14.641718926 +0000 UTC m=+1260.226041256" Dec 05 12:30:14 crc kubenswrapper[4711]: I1205 12:30:14.801243 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7658cc989c-rdbv8"] Dec 05 12:30:14 crc kubenswrapper[4711]: I1205 12:30:14.801453 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" containerName="dnsmasq-dns" containerID="cri-o://154ef17e604562451a343074d4609420936fb40637503df88e5ccf3e0ea60f8f" gracePeriod=10 Dec 05 12:30:15 crc kubenswrapper[4711]: I1205 12:30:15.603043 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b64c669fc-xv225" event={"ID":"4fa030c6-14a5-4924-a293-cfa089e98f54","Type":"ContainerStarted","Data":"83059e3eb13c1f5e3670d6de57aab1ab5b18bce043bd6a0067daf1812a5fdef4"} Dec 05 12:30:15 crc kubenswrapper[4711]: I1205 12:30:15.603859 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:15 crc kubenswrapper[4711]: I1205 12:30:15.605683 4711 generic.go:334] "Generic (PLEG): container finished" podID="1f496d6f-8883-493b-866b-117f6b7537e4" containerID="154ef17e604562451a343074d4609420936fb40637503df88e5ccf3e0ea60f8f" exitCode=0 Dec 05 12:30:15 crc kubenswrapper[4711]: I1205 12:30:15.605799 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" event={"ID":"1f496d6f-8883-493b-866b-117f6b7537e4","Type":"ContainerDied","Data":"154ef17e604562451a343074d4609420936fb40637503df88e5ccf3e0ea60f8f"} Dec 05 12:30:15 crc kubenswrapper[4711]: I1205 12:30:15.607742 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-555488f586-gdlch" event={"ID":"76c7c0ee-1c4f-409f-9706-663ff94af792","Type":"ContainerStarted","Data":"e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246"} Dec 05 12:30:15 crc kubenswrapper[4711]: I1205 12:30:15.634018 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6b64c669fc-xv225" podStartSLOduration=8.633990498 podStartE2EDuration="8.633990498s" podCreationTimestamp="2025-12-05 12:30:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:15.626980006 +0000 UTC m=+1261.211302356" watchObservedRunningTime="2025-12-05 12:30:15.633990498 +0000 UTC m=+1261.218312828" Dec 05 12:30:15 crc kubenswrapper[4711]: I1205 12:30:15.653710 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=20.59548892 podStartE2EDuration="25.653687061s" podCreationTimestamp="2025-12-05 12:29:50 +0000 UTC" firstStartedPulling="2025-12-05 12:30:03.385098836 +0000 UTC m=+1248.969421166" 
lastFinishedPulling="2025-12-05 12:30:08.443296977 +0000 UTC m=+1254.027619307" observedRunningTime="2025-12-05 12:30:15.64593459 +0000 UTC m=+1261.230256920" watchObservedRunningTime="2025-12-05 12:30:15.653687061 +0000 UTC m=+1261.238009391" Dec 05 12:30:15 crc kubenswrapper[4711]: I1205 12:30:15.798653 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Dec 05 12:30:16 crc kubenswrapper[4711]: I1205 12:30:16.647003 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-555488f586-gdlch" podStartSLOduration=13.646978686 podStartE2EDuration="13.646978686s" podCreationTimestamp="2025-12-05 12:30:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:16.640633041 +0000 UTC m=+1262.224955371" watchObservedRunningTime="2025-12-05 12:30:16.646978686 +0000 UTC m=+1262.231301026" Dec 05 12:30:16 crc kubenswrapper[4711]: I1205 12:30:16.759838 4711 scope.go:117] "RemoveContainer" containerID="09b1ac4e38ae04177ac56c09b0ed27081011e9c18b5f500a3a9520144fad932c" Dec 05 12:30:17 crc kubenswrapper[4711]: I1205 12:30:17.606175 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:30:17 crc kubenswrapper[4711]: I1205 12:30:17.606248 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:30:17 crc kubenswrapper[4711]: I1205 12:30:17.675811 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:30:17 crc kubenswrapper[4711]: I1205 12:30:17.675904 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.666939 4711 generic.go:334] "Generic (PLEG): container finished" podID="1004a56f-792b-4dae-bee1-6be07e0d72dc" containerID="5c128be0ec38b635947cacf350dd5e9459e6140109ccd5dfd0313d35a484e8a9" exitCode=0 Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.668917 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2g9vv" event={"ID":"1004a56f-792b-4dae-bee1-6be07e0d72dc","Type":"ContainerDied","Data":"5c128be0ec38b635947cacf350dd5e9459e6140109ccd5dfd0313d35a484e8a9"} Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.804659 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.804785 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.822730 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.850714 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.850926 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.860886 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Dec 05 12:30:20 crc kubenswrapper[4711]: I1205 12:30:20.892797 4711 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:21 crc kubenswrapper[4711]: I1205 12:30:21.722829 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Dec 05 12:30:21 crc kubenswrapper[4711]: I1205 12:30:21.726021 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:23 crc kubenswrapper[4711]: I1205 12:30:23.811187 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.151:5353: i/o timeout" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.093424 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.093653 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api-log" containerID="cri-o://8401a42463d7e3b9de96ab55c5591ddf4fa85e49ebee1db2e3b54dd66608ea9d" gracePeriod=30 Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.093705 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api" containerID="cri-o://a6f51066ef8a4a296da4c8ab1e2110be7b51674e93abe5812d8dd2a9a63c0376" gracePeriod=30 Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.235368 4711 scope.go:117] "RemoveContainer" containerID="0bca0f5b010b985fbbc2b74998a93a323d1e061fb8f7533ece5b0f75defc130c" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.362458 4711 scope.go:117] "RemoveContainer" containerID="7b275127e9d22d152d93ca4965e2526042ad29d9b2bb7af347a49bd8de0236e6" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.423810 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.514350 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.596271 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-svc\") pod \"1f496d6f-8883-493b-866b-117f6b7537e4\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.596412 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-swift-storage-0\") pod \"1f496d6f-8883-493b-866b-117f6b7537e4\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.596456 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-nb\") pod \"1f496d6f-8883-493b-866b-117f6b7537e4\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.596526 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-sb\") pod \"1f496d6f-8883-493b-866b-117f6b7537e4\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.596574 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbh84\" (UniqueName: \"kubernetes.io/projected/1f496d6f-8883-493b-866b-117f6b7537e4-kube-api-access-qbh84\") pod \"1f496d6f-8883-493b-866b-117f6b7537e4\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.596618 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-config\") pod \"1f496d6f-8883-493b-866b-117f6b7537e4\" (UID: \"1f496d6f-8883-493b-866b-117f6b7537e4\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.609698 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f496d6f-8883-493b-866b-117f6b7537e4-kube-api-access-qbh84" (OuterVolumeSpecName: "kube-api-access-qbh84") pod "1f496d6f-8883-493b-866b-117f6b7537e4" (UID: "1f496d6f-8883-493b-866b-117f6b7537e4"). InnerVolumeSpecName "kube-api-access-qbh84". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.669427 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1f496d6f-8883-493b-866b-117f6b7537e4" (UID: "1f496d6f-8883-493b-866b-117f6b7537e4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.674767 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1f496d6f-8883-493b-866b-117f6b7537e4" (UID: "1f496d6f-8883-493b-866b-117f6b7537e4"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.698058 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-config-data\") pod \"1004a56f-792b-4dae-bee1-6be07e0d72dc\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.698134 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-credential-keys\") pod \"1004a56f-792b-4dae-bee1-6be07e0d72dc\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.698194 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brndd\" (UniqueName: \"kubernetes.io/projected/1004a56f-792b-4dae-bee1-6be07e0d72dc-kube-api-access-brndd\") pod \"1004a56f-792b-4dae-bee1-6be07e0d72dc\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.698305 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-scripts\") pod \"1004a56f-792b-4dae-bee1-6be07e0d72dc\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.698376 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-combined-ca-bundle\") pod \"1004a56f-792b-4dae-bee1-6be07e0d72dc\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.698499 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-fernet-keys\") pod \"1004a56f-792b-4dae-bee1-6be07e0d72dc\" (UID: \"1004a56f-792b-4dae-bee1-6be07e0d72dc\") " Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.701898 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-config" (OuterVolumeSpecName: "config") pod "1f496d6f-8883-493b-866b-117f6b7537e4" (UID: "1f496d6f-8883-493b-866b-117f6b7537e4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.704370 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbh84\" (UniqueName: \"kubernetes.io/projected/1f496d6f-8883-493b-866b-117f6b7537e4-kube-api-access-qbh84\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.704427 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.704438 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.704448 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.705841 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1f496d6f-8883-493b-866b-117f6b7537e4" (UID: "1f496d6f-8883-493b-866b-117f6b7537e4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.708662 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1004a56f-792b-4dae-bee1-6be07e0d72dc" (UID: "1004a56f-792b-4dae-bee1-6be07e0d72dc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.712809 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1004a56f-792b-4dae-bee1-6be07e0d72dc-kube-api-access-brndd" (OuterVolumeSpecName: "kube-api-access-brndd") pod "1004a56f-792b-4dae-bee1-6be07e0d72dc" (UID: "1004a56f-792b-4dae-bee1-6be07e0d72dc"). InnerVolumeSpecName "kube-api-access-brndd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.714909 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2g9vv" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.716589 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-scripts" (OuterVolumeSpecName: "scripts") pod "1004a56f-792b-4dae-bee1-6be07e0d72dc" (UID: "1004a56f-792b-4dae-bee1-6be07e0d72dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.719075 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.722820 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1004a56f-792b-4dae-bee1-6be07e0d72dc" (UID: "1004a56f-792b-4dae-bee1-6be07e0d72dc"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.723529 4711 generic.go:334] "Generic (PLEG): container finished" podID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerID="8401a42463d7e3b9de96ab55c5591ddf4fa85e49ebee1db2e3b54dd66608ea9d" exitCode=143 Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.734090 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1f496d6f-8883-493b-866b-117f6b7537e4" (UID: "1f496d6f-8883-493b-866b-117f6b7537e4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.735041 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2g9vv" event={"ID":"1004a56f-792b-4dae-bee1-6be07e0d72dc","Type":"ContainerDied","Data":"e02971927ebe158052e0b2a1c4d32c44299daa0cd8d1d8ea39f32102b533bd6b"} Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.735180 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e02971927ebe158052e0b2a1c4d32c44299daa0cd8d1d8ea39f32102b533bd6b" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.735275 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" event={"ID":"1f496d6f-8883-493b-866b-117f6b7537e4","Type":"ContainerDied","Data":"52c3df0f2f8b4be4f732bafb25b134cd6ceaaf9ed61888a915be10930b7a2042"} Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.735356 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"655d9acd-df17-43e8-92b8-4f175cdda6b4","Type":"ContainerDied","Data":"8401a42463d7e3b9de96ab55c5591ddf4fa85e49ebee1db2e3b54dd66608ea9d"} Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.735553 4711 scope.go:117] "RemoveContainer" containerID="154ef17e604562451a343074d4609420936fb40637503df88e5ccf3e0ea60f8f" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.751602 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1004a56f-792b-4dae-bee1-6be07e0d72dc" (UID: "1004a56f-792b-4dae-bee1-6be07e0d72dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.761651 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-config-data" (OuterVolumeSpecName: "config-data") pod "1004a56f-792b-4dae-bee1-6be07e0d72dc" (UID: "1004a56f-792b-4dae-bee1-6be07e0d72dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.806062 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.806103 4711 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.806115 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brndd\" (UniqueName: \"kubernetes.io/projected/1004a56f-792b-4dae-bee1-6be07e0d72dc-kube-api-access-brndd\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.806125 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.806136 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.806144 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.806152 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f496d6f-8883-493b-866b-117f6b7537e4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:24 crc kubenswrapper[4711]: I1205 12:30:24.806160 4711 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1004a56f-792b-4dae-bee1-6be07e0d72dc-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.070052 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7658cc989c-rdbv8"] Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.084917 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7658cc989c-rdbv8"] Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.162807 4711 scope.go:117] "RemoveContainer" containerID="bae130e97d4ecb80284d9fbfbab2c1c20b252c7789a860b1ff177f6099a146d8" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.756280 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-545d7cb86d-bpnk9"] Dec 05 12:30:25 crc kubenswrapper[4711]: E1205 12:30:25.761424 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" containerName="init" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.761457 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" containerName="init" Dec 05 12:30:25 crc kubenswrapper[4711]: E1205 12:30:25.761481 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e88e06c8-ba12-4a4a-b88d-726fc1c0925a" containerName="collect-profiles" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.761489 4711 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e88e06c8-ba12-4a4a-b88d-726fc1c0925a" containerName="collect-profiles" Dec 05 12:30:25 crc kubenswrapper[4711]: E1205 12:30:25.761551 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" containerName="dnsmasq-dns" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.761560 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" containerName="dnsmasq-dns" Dec 05 12:30:25 crc kubenswrapper[4711]: E1205 12:30:25.761591 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1004a56f-792b-4dae-bee1-6be07e0d72dc" containerName="keystone-bootstrap" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.761717 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1004a56f-792b-4dae-bee1-6be07e0d72dc" containerName="keystone-bootstrap" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.763495 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e88e06c8-ba12-4a4a-b88d-726fc1c0925a" containerName="collect-profiles" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.763562 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1004a56f-792b-4dae-bee1-6be07e0d72dc" containerName="keystone-bootstrap" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.763651 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" containerName="dnsmasq-dns" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.765777 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.770336 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.774381 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-7sbtb" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.774646 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.775616 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.775957 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.788211 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.159:9322/\": dial tcp 10.217.0.159:9322: connect: connection refused" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.788750 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9322/\": dial tcp 10.217.0.159:9322: connect: connection refused" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.797975 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.815459 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-545d7cb86d-bpnk9"] Dec 05 12:30:25 crc 
kubenswrapper[4711]: I1205 12:30:25.928602 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-scripts\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.928667 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-public-tls-certs\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.928720 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfj7b\" (UniqueName: \"kubernetes.io/projected/d8337ca9-f920-4f08-b321-15a6b9290a76-kube-api-access-gfj7b\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.928775 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-combined-ca-bundle\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.928827 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-config-data\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.928848 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-fernet-keys\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.928879 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-internal-tls-certs\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:25 crc kubenswrapper[4711]: I1205 12:30:25.929021 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-credential-keys\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.031147 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-config-data\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " 
pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.031192 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-fernet-keys\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.031219 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-internal-tls-certs\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.031286 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-credential-keys\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.031358 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-scripts\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.031381 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-public-tls-certs\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.031454 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfj7b\" (UniqueName: \"kubernetes.io/projected/d8337ca9-f920-4f08-b321-15a6b9290a76-kube-api-access-gfj7b\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.031492 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-combined-ca-bundle\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.035981 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-config-data\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.037923 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-scripts\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.038438 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-public-tls-certs\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.038567 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-fernet-keys\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.038815 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-credential-keys\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.044124 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-internal-tls-certs\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.044176 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8337ca9-f920-4f08-b321-15a6b9290a76-combined-ca-bundle\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.062942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfj7b\" (UniqueName: \"kubernetes.io/projected/d8337ca9-f920-4f08-b321-15a6b9290a76-kube-api-access-gfj7b\") pod \"keystone-545d7cb86d-bpnk9\" (UID: \"d8337ca9-f920-4f08-b321-15a6b9290a76\") " pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.189150 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.713743 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" path="/var/lib/kubelet/pods/1f496d6f-8883-493b-866b-117f6b7537e4/volumes" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.732202 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-545d7cb86d-bpnk9"] Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.820953 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a059dfe5-97d3-412e-b70b-430bd3ab92b9","Type":"ContainerStarted","Data":"9235ed2ac35f39d072b75d4a15a0434260525cdfdb0838e2d3c5a87670f1dce9"} Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.822790 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-545d7cb86d-bpnk9" event={"ID":"d8337ca9-f920-4f08-b321-15a6b9290a76","Type":"ContainerStarted","Data":"5c8337369aa6cb3b20e1700c0bfeb53e6f991a200c0ef3c3c7b5ec10ae01dd49"} Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.824368 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-sm4bg" event={"ID":"ddff9a4d-a020-4de4-a114-694bec9908f9","Type":"ContainerStarted","Data":"c3d7238ac1459cfd25423978aa6be3db4a7dc0de917f4e2c4b2f8ff6306c34a5"} Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.827223 4711 generic.go:334] "Generic (PLEG): container finished" podID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerID="a6f51066ef8a4a296da4c8ab1e2110be7b51674e93abe5812d8dd2a9a63c0376" exitCode=0 Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.827293 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"655d9acd-df17-43e8-92b8-4f175cdda6b4","Type":"ContainerDied","Data":"a6f51066ef8a4a296da4c8ab1e2110be7b51674e93abe5812d8dd2a9a63c0376"} Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.827340 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"655d9acd-df17-43e8-92b8-4f175cdda6b4","Type":"ContainerDied","Data":"fa74880fb1ef458e72b1140faa2e00b4645951d8168f82cd6962954cf031d49f"} Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.827359 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa74880fb1ef458e72b1140faa2e00b4645951d8168f82cd6962954cf031d49f" Dec 05 12:30:26 crc kubenswrapper[4711]: I1205 12:30:26.850339 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-sm4bg" podStartSLOduration=3.249058101 podStartE2EDuration="1m8.850283939s" podCreationTimestamp="2025-12-05 12:29:18 +0000 UTC" firstStartedPulling="2025-12-05 12:29:20.094889629 +0000 UTC m=+1205.679211959" lastFinishedPulling="2025-12-05 12:30:25.696115477 +0000 UTC m=+1271.280437797" observedRunningTime="2025-12-05 12:30:26.845266656 +0000 UTC m=+1272.429588996" watchObservedRunningTime="2025-12-05 12:30:26.850283939 +0000 UTC m=+1272.434606269" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.018853 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.154340 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-config-data\") pod \"655d9acd-df17-43e8-92b8-4f175cdda6b4\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.156134 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-custom-prometheus-ca\") pod \"655d9acd-df17-43e8-92b8-4f175cdda6b4\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.156296 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjvpl\" (UniqueName: \"kubernetes.io/projected/655d9acd-df17-43e8-92b8-4f175cdda6b4-kube-api-access-fjvpl\") pod \"655d9acd-df17-43e8-92b8-4f175cdda6b4\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.156608 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655d9acd-df17-43e8-92b8-4f175cdda6b4-logs\") pod \"655d9acd-df17-43e8-92b8-4f175cdda6b4\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.156807 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-combined-ca-bundle\") pod \"655d9acd-df17-43e8-92b8-4f175cdda6b4\" (UID: \"655d9acd-df17-43e8-92b8-4f175cdda6b4\") " Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.157312 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/655d9acd-df17-43e8-92b8-4f175cdda6b4-logs" (OuterVolumeSpecName: "logs") pod "655d9acd-df17-43e8-92b8-4f175cdda6b4" (UID: "655d9acd-df17-43e8-92b8-4f175cdda6b4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.158001 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655d9acd-df17-43e8-92b8-4f175cdda6b4-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.160734 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/655d9acd-df17-43e8-92b8-4f175cdda6b4-kube-api-access-fjvpl" (OuterVolumeSpecName: "kube-api-access-fjvpl") pod "655d9acd-df17-43e8-92b8-4f175cdda6b4" (UID: "655d9acd-df17-43e8-92b8-4f175cdda6b4"). InnerVolumeSpecName "kube-api-access-fjvpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.196410 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "655d9acd-df17-43e8-92b8-4f175cdda6b4" (UID: "655d9acd-df17-43e8-92b8-4f175cdda6b4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.200140 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "655d9acd-df17-43e8-92b8-4f175cdda6b4" (UID: "655d9acd-df17-43e8-92b8-4f175cdda6b4"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.232615 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-config-data" (OuterVolumeSpecName: "config-data") pod "655d9acd-df17-43e8-92b8-4f175cdda6b4" (UID: "655d9acd-df17-43e8-92b8-4f175cdda6b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.260318 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.260519 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.260624 4711 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/655d9acd-df17-43e8-92b8-4f175cdda6b4-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.260702 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjvpl\" (UniqueName: \"kubernetes.io/projected/655d9acd-df17-43e8-92b8-4f175cdda6b4-kube-api-access-fjvpl\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.608524 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-744fd5f788-bs9bc" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.156:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.156:8443: connect: connection refused" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.678669 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6856c6c898-9lzvt" podUID="ba5de8d4-e693-4431-aee3-0ba498f62e8e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.157:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.157:8443: connect: connection refused" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.858511 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-545d7cb86d-bpnk9" event={"ID":"d8337ca9-f920-4f08-b321-15a6b9290a76","Type":"ContainerStarted","Data":"c9374fe883b1bb0dd70beb485f66c468a5121e44479ad94f43c9f8cf67839682"} Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.859732 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-545d7cb86d-bpnk9" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.865216 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.875613 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-j6vdf" event={"ID":"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661","Type":"ContainerStarted","Data":"039647fb3cedc13b9bd16efffa1e1a5553e27cbcbadb9b7a8b3340cbd5e09621"} Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.893655 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-545d7cb86d-bpnk9" podStartSLOduration=2.893630813 podStartE2EDuration="2.893630813s" podCreationTimestamp="2025-12-05 12:30:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:27.884715503 +0000 UTC m=+1273.469037853" watchObservedRunningTime="2025-12-05 12:30:27.893630813 +0000 UTC m=+1273.477953133" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.935638 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-j6vdf" podStartSLOduration=4.120137589 podStartE2EDuration="1m9.935612362s" podCreationTimestamp="2025-12-05 12:29:18 +0000 UTC" firstStartedPulling="2025-12-05 12:29:19.879948478 +0000 UTC m=+1205.464270808" lastFinishedPulling="2025-12-05 12:30:25.695423251 +0000 UTC m=+1271.279745581" observedRunningTime="2025-12-05 12:30:27.922988682 +0000 UTC m=+1273.507311002" watchObservedRunningTime="2025-12-05 12:30:27.935612362 +0000 UTC m=+1273.519934692" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.952723 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.963251 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.990864 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:30:27 crc kubenswrapper[4711]: E1205 12:30:27.991574 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.991592 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api" Dec 05 12:30:27 crc kubenswrapper[4711]: E1205 12:30:27.991607 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api-log" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.991614 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api-log" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.991819 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api-log" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.991839 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" containerName="watcher-api" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.992833 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.995046 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc" Dec 05 12:30:27 crc kubenswrapper[4711]: I1205 12:30:27.997304 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.001028 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.018447 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.082472 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-public-tls-certs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.082514 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz5mp\" (UniqueName: \"kubernetes.io/projected/96d9fdfa-55ca-4955-a585-b730c58b8e6f-kube-api-access-jz5mp\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.082533 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-config-data\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.082572 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.082593 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d9fdfa-55ca-4955-a585-b730c58b8e6f-logs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.082624 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.082662 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.183736 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/96d9fdfa-55ca-4955-a585-b730c58b8e6f-logs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.183795 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.183845 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.183929 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-public-tls-certs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.183952 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz5mp\" (UniqueName: \"kubernetes.io/projected/96d9fdfa-55ca-4955-a585-b730c58b8e6f-kube-api-access-jz5mp\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.183966 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-config-data\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.183997 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.184363 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d9fdfa-55ca-4955-a585-b730c58b8e6f-logs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.201065 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.201432 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-config-data\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.202291 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: 
\"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.206486 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.218870 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz5mp\" (UniqueName: \"kubernetes.io/projected/96d9fdfa-55ca-4955-a585-b730c58b8e6f-kube-api-access-jz5mp\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.229549 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d9fdfa-55ca-4955-a585-b730c58b8e6f-public-tls-certs\") pod \"watcher-api-0\" (UID: \"96d9fdfa-55ca-4955-a585-b730c58b8e6f\") " pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.335476 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.709641 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="655d9acd-df17-43e8-92b8-4f175cdda6b4" path="/var/lib/kubelet/pods/655d9acd-df17-43e8-92b8-4f175cdda6b4/volumes" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.812919 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7658cc989c-rdbv8" podUID="1f496d6f-8883-493b-866b-117f6b7537e4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.151:5353: i/o timeout" Dec 05 12:30:28 crc kubenswrapper[4711]: I1205 12:30:28.881861 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:30:29 crc kubenswrapper[4711]: I1205 12:30:29.888753 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"96d9fdfa-55ca-4955-a585-b730c58b8e6f","Type":"ContainerStarted","Data":"a10c0a1126c4b8b14af0790b58e7391b39ea7903b5740a6c404f3ddc9b446358"} Dec 05 12:30:29 crc kubenswrapper[4711]: I1205 12:30:29.889254 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"96d9fdfa-55ca-4955-a585-b730c58b8e6f","Type":"ContainerStarted","Data":"069976e66333571563cece2ad56a3ff4d99c8bc7bc316dff7ef2e7bd7af87a64"} Dec 05 12:30:29 crc kubenswrapper[4711]: I1205 12:30:29.889266 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"96d9fdfa-55ca-4955-a585-b730c58b8e6f","Type":"ContainerStarted","Data":"a80bd5f965b29a4e54ac08f629cb2dff58d689563b9976e85532cf2fa1c87a86"} Dec 05 12:30:29 crc kubenswrapper[4711]: I1205 12:30:29.922587 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=2.922559483 podStartE2EDuration="2.922559483s" podCreationTimestamp="2025-12-05 12:30:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:29.916260668 +0000 UTC m=+1275.500582998" watchObservedRunningTime="2025-12-05 
12:30:29.922559483 +0000 UTC m=+1275.506881813" Dec 05 12:30:30 crc kubenswrapper[4711]: I1205 12:30:30.904740 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 12:30:33 crc kubenswrapper[4711]: I1205 12:30:33.335828 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 12:30:33 crc kubenswrapper[4711]: I1205 12:30:33.336175 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:30:33 crc kubenswrapper[4711]: I1205 12:30:33.765297 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 12:30:34 crc kubenswrapper[4711]: I1205 12:30:34.571465 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:35 crc kubenswrapper[4711]: I1205 12:30:35.271918 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:36 crc kubenswrapper[4711]: E1205 12:30:36.796500 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod442afda2_58a4_42e0_8793_08854bf5a587.slice/crio-18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f496d6f_8883_493b_866b_117f6b7537e4.slice/crio-52c3df0f2f8b4be4f732bafb25b134cd6ceaaf9ed61888a915be10930b7a2042\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod442afda2_58a4_42e0_8793_08854bf5a587.slice/crio-conmon-18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f5e58bb_1049_4d61_b78a_a192ae4fe61a.slice/crio-126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f5e58bb_1049_4d61_b78a_a192ae4fe61a.slice/crio-conmon-79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f5e58bb_1049_4d61_b78a_a192ae4fe61a.slice/crio-79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod442afda2_58a4_42e0_8793_08854bf5a587.slice/crio-conmon-54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf955aa55_498a_457a_8bf3_9214f5751e47.slice/crio-f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89800be3_c463_4e1e_b92a_abb613b5bf5e.slice/crio-74140c15b9c50e00a2dc82b3d4b1074606e814fe48324d3bb6a5dc41c13ddcf4.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f5e58bb_1049_4d61_b78a_a192ae4fe61a.slice/crio-conmon-126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004.scope\": RecentStats: unable to find data in memory cache]" Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.907321 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.944843 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.960924 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.972179 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-config-data\") pod \"f955aa55-498a-457a-8bf3-9214f5751e47\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.972308 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-scripts\") pod \"f955aa55-498a-457a-8bf3-9214f5751e47\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.972358 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955aa55-498a-457a-8bf3-9214f5751e47-logs\") pod \"f955aa55-498a-457a-8bf3-9214f5751e47\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.972452 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f955aa55-498a-457a-8bf3-9214f5751e47-horizon-secret-key\") pod \"f955aa55-498a-457a-8bf3-9214f5751e47\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.972552 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8277\" (UniqueName: \"kubernetes.io/projected/f955aa55-498a-457a-8bf3-9214f5751e47-kube-api-access-x8277\") pod \"f955aa55-498a-457a-8bf3-9214f5751e47\" (UID: \"f955aa55-498a-457a-8bf3-9214f5751e47\") " Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.982776 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f955aa55-498a-457a-8bf3-9214f5751e47-kube-api-access-x8277" (OuterVolumeSpecName: "kube-api-access-x8277") pod "f955aa55-498a-457a-8bf3-9214f5751e47" (UID: "f955aa55-498a-457a-8bf3-9214f5751e47"). InnerVolumeSpecName "kube-api-access-x8277". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.983417 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f955aa55-498a-457a-8bf3-9214f5751e47-logs" (OuterVolumeSpecName: "logs") pod "f955aa55-498a-457a-8bf3-9214f5751e47" (UID: "f955aa55-498a-457a-8bf3-9214f5751e47"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.986654 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f955aa55-498a-457a-8bf3-9214f5751e47-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f955aa55-498a-457a-8bf3-9214f5751e47" (UID: "f955aa55-498a-457a-8bf3-9214f5751e47"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.994286 4711 generic.go:334] "Generic (PLEG): container finished" podID="89800be3-c463-4e1e-b92a-abb613b5bf5e" containerID="74140c15b9c50e00a2dc82b3d4b1074606e814fe48324d3bb6a5dc41c13ddcf4" exitCode=0 Dec 05 12:30:36 crc kubenswrapper[4711]: I1205 12:30:36.994405 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7sl8r" event={"ID":"89800be3-c463-4e1e-b92a-abb613b5bf5e","Type":"ContainerDied","Data":"74140c15b9c50e00a2dc82b3d4b1074606e814fe48324d3bb6a5dc41c13ddcf4"} Dec 05 12:30:37 crc kubenswrapper[4711]: E1205 12:30:36.998874 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.000376 4711 generic.go:334] "Generic (PLEG): container finished" podID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerID="2335bde3a171c64d57f7ee19e1c6d22c1e1865ea34f968b4e4aac49bc4b8a90e" exitCode=1 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.000489 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerDied","Data":"2335bde3a171c64d57f7ee19e1c6d22c1e1865ea34f968b4e4aac49bc4b8a90e"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.001173 4711 scope.go:117] "RemoveContainer" containerID="2335bde3a171c64d57f7ee19e1c6d22c1e1865ea34f968b4e4aac49bc4b8a90e" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.015069 4711 generic.go:334] "Generic (PLEG): container finished" podID="f955aa55-498a-457a-8bf3-9214f5751e47" containerID="f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be" exitCode=137 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.015100 4711 generic.go:334] "Generic (PLEG): container finished" podID="f955aa55-498a-457a-8bf3-9214f5751e47" containerID="c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba" exitCode=137 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.015147 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8d8fdfd5-5nhbq" event={"ID":"f955aa55-498a-457a-8bf3-9214f5751e47","Type":"ContainerDied","Data":"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.015179 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8d8fdfd5-5nhbq" event={"ID":"f955aa55-498a-457a-8bf3-9214f5751e47","Type":"ContainerDied","Data":"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.015191 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8d8fdfd5-5nhbq" 
event={"ID":"f955aa55-498a-457a-8bf3-9214f5751e47","Type":"ContainerDied","Data":"cecdd825ba06bc7c432b983723b53c4a8b39a0f611b3c22c34559cbcd194efa1"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.015209 4711 scope.go:117] "RemoveContainer" containerID="f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.015342 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b8d8fdfd5-5nhbq" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.019430 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-scripts" (OuterVolumeSpecName: "scripts") pod "f955aa55-498a-457a-8bf3-9214f5751e47" (UID: "f955aa55-498a-457a-8bf3-9214f5751e47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.034227 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="ceilometer-notification-agent" containerID="cri-o://454c5c2c8669dfd6e10ce7d7bb01e3db1fc9bd864e5c7e05ed5b825fbe3bed20" gracePeriod=30 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.034758 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.034817 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="proxy-httpd" containerID="cri-o://c129a0798110b72322c603984a291548de03fe231fb2d3c5c2ef0b8c8ca0577a" gracePeriod=30 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.034872 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="sg-core" containerID="cri-o://9235ed2ac35f39d072b75d4a15a0434260525cdfdb0838e2d3c5a87670f1dce9" gracePeriod=30 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.050649 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-config-data" (OuterVolumeSpecName: "config-data") pod "f955aa55-498a-457a-8bf3-9214f5751e47" (UID: "f955aa55-498a-457a-8bf3-9214f5751e47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.055593 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerID="126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004" exitCode=137 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.055626 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerID="79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf" exitCode=137 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.055730 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6788c646d7-4kxrg" event={"ID":"5f5e58bb-1049-4d61-b78a-a192ae4fe61a","Type":"ContainerDied","Data":"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.055768 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6788c646d7-4kxrg" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.055786 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6788c646d7-4kxrg" event={"ID":"5f5e58bb-1049-4d61-b78a-a192ae4fe61a","Type":"ContainerDied","Data":"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.055801 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6788c646d7-4kxrg" event={"ID":"5f5e58bb-1049-4d61-b78a-a192ae4fe61a","Type":"ContainerDied","Data":"09684f722133fd58172e4547768832e42a2a549d181b123cfa520ae9a3ce7e5a"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.059925 4711 generic.go:334] "Generic (PLEG): container finished" podID="442afda2-58a4-42e0-8793-08854bf5a587" containerID="54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249" exitCode=137 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.059954 4711 generic.go:334] "Generic (PLEG): container finished" podID="442afda2-58a4-42e0-8793-08854bf5a587" containerID="18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a" exitCode=137 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.059975 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-649d45d4d9-fm5xf" event={"ID":"442afda2-58a4-42e0-8793-08854bf5a587","Type":"ContainerDied","Data":"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.059999 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-649d45d4d9-fm5xf" event={"ID":"442afda2-58a4-42e0-8793-08854bf5a587","Type":"ContainerDied","Data":"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.060009 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-649d45d4d9-fm5xf" event={"ID":"442afda2-58a4-42e0-8793-08854bf5a587","Type":"ContainerDied","Data":"ab16a2d81ac8010e9bb9364c2245e6525606d730f3804db5eb311b1407af27b5"} Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.060052 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-649d45d4d9-fm5xf" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079231 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l85st\" (UniqueName: \"kubernetes.io/projected/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-kube-api-access-l85st\") pod \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079334 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442afda2-58a4-42e0-8793-08854bf5a587-logs\") pod \"442afda2-58a4-42e0-8793-08854bf5a587\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079367 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7rsb\" (UniqueName: \"kubernetes.io/projected/442afda2-58a4-42e0-8793-08854bf5a587-kube-api-access-l7rsb\") pod \"442afda2-58a4-42e0-8793-08854bf5a587\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079467 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-config-data\") pod \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079502 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-horizon-secret-key\") pod \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079561 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-config-data\") pod \"442afda2-58a4-42e0-8793-08854bf5a587\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079619 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-scripts\") pod \"442afda2-58a4-42e0-8793-08854bf5a587\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079647 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442afda2-58a4-42e0-8793-08854bf5a587-horizon-secret-key\") pod \"442afda2-58a4-42e0-8793-08854bf5a587\" (UID: \"442afda2-58a4-42e0-8793-08854bf5a587\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079693 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-scripts\") pod \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\" (UID: \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.079765 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-logs\") pod \"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\" (UID: 
\"5f5e58bb-1049-4d61-b78a-a192ae4fe61a\") " Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.080099 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/442afda2-58a4-42e0-8793-08854bf5a587-logs" (OuterVolumeSpecName: "logs") pod "442afda2-58a4-42e0-8793-08854bf5a587" (UID: "442afda2-58a4-42e0-8793-08854bf5a587"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.080352 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.080377 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955aa55-498a-457a-8bf3-9214f5751e47-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.080406 4711 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f955aa55-498a-457a-8bf3-9214f5751e47-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.080421 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8277\" (UniqueName: \"kubernetes.io/projected/f955aa55-498a-457a-8bf3-9214f5751e47-kube-api-access-x8277\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.080433 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442afda2-58a4-42e0-8793-08854bf5a587-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.080446 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f955aa55-498a-457a-8bf3-9214f5751e47-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.083143 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-logs" (OuterVolumeSpecName: "logs") pod "5f5e58bb-1049-4d61-b78a-a192ae4fe61a" (UID: "5f5e58bb-1049-4d61-b78a-a192ae4fe61a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.086150 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/442afda2-58a4-42e0-8793-08854bf5a587-kube-api-access-l7rsb" (OuterVolumeSpecName: "kube-api-access-l7rsb") pod "442afda2-58a4-42e0-8793-08854bf5a587" (UID: "442afda2-58a4-42e0-8793-08854bf5a587"). InnerVolumeSpecName "kube-api-access-l7rsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.086412 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-kube-api-access-l85st" (OuterVolumeSpecName: "kube-api-access-l85st") pod "5f5e58bb-1049-4d61-b78a-a192ae4fe61a" (UID: "5f5e58bb-1049-4d61-b78a-a192ae4fe61a"). InnerVolumeSpecName "kube-api-access-l85st". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.093953 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5f5e58bb-1049-4d61-b78a-a192ae4fe61a" (UID: "5f5e58bb-1049-4d61-b78a-a192ae4fe61a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.102821 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/442afda2-58a4-42e0-8793-08854bf5a587-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "442afda2-58a4-42e0-8793-08854bf5a587" (UID: "442afda2-58a4-42e0-8793-08854bf5a587"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.111415 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-scripts" (OuterVolumeSpecName: "scripts") pod "5f5e58bb-1049-4d61-b78a-a192ae4fe61a" (UID: "5f5e58bb-1049-4d61-b78a-a192ae4fe61a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.118027 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-config-data" (OuterVolumeSpecName: "config-data") pod "5f5e58bb-1049-4d61-b78a-a192ae4fe61a" (UID: "5f5e58bb-1049-4d61-b78a-a192ae4fe61a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.119139 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-config-data" (OuterVolumeSpecName: "config-data") pod "442afda2-58a4-42e0-8793-08854bf5a587" (UID: "442afda2-58a4-42e0-8793-08854bf5a587"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.121222 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-scripts" (OuterVolumeSpecName: "scripts") pod "442afda2-58a4-42e0-8793-08854bf5a587" (UID: "442afda2-58a4-42e0-8793-08854bf5a587"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182360 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182479 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182495 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l85st\" (UniqueName: \"kubernetes.io/projected/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-kube-api-access-l85st\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182509 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7rsb\" (UniqueName: \"kubernetes.io/projected/442afda2-58a4-42e0-8793-08854bf5a587-kube-api-access-l7rsb\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182530 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182542 4711 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f5e58bb-1049-4d61-b78a-a192ae4fe61a-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182553 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182565 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442afda2-58a4-42e0-8793-08854bf5a587-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.182577 4711 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442afda2-58a4-42e0-8793-08854bf5a587-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.229758 4711 scope.go:117] "RemoveContainer" containerID="c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.253680 4711 scope.go:117] "RemoveContainer" containerID="f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be" Dec 05 12:30:37 crc kubenswrapper[4711]: E1205 12:30:37.254180 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be\": container with ID starting with f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be not found: ID does not exist" containerID="f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.254222 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be"} err="failed to get container status 
\"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be\": rpc error: code = NotFound desc = could not find container \"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be\": container with ID starting with f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.254250 4711 scope.go:117] "RemoveContainer" containerID="c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba" Dec 05 12:30:37 crc kubenswrapper[4711]: E1205 12:30:37.254822 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba\": container with ID starting with c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba not found: ID does not exist" containerID="c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.254863 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba"} err="failed to get container status \"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba\": rpc error: code = NotFound desc = could not find container \"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba\": container with ID starting with c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.254892 4711 scope.go:117] "RemoveContainer" containerID="f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.255740 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be"} err="failed to get container status \"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be\": rpc error: code = NotFound desc = could not find container \"f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be\": container with ID starting with f35facf22002de69bb54b3dea50da4e66ca02727e4910c9d1deaa2a54f5ca4be not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.255791 4711 scope.go:117] "RemoveContainer" containerID="c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.256744 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba"} err="failed to get container status \"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba\": rpc error: code = NotFound desc = could not find container \"c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba\": container with ID starting with c012e7d45902818606905d4ddd88389f6e7612b82f775205a36bf4359ba41fba not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.256777 4711 scope.go:117] "RemoveContainer" containerID="126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.433144 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b8d8fdfd5-5nhbq"] Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.435905 4711 scope.go:117] "RemoveContainer" 
containerID="79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.441732 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7b8d8fdfd5-5nhbq"] Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.452789 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-649d45d4d9-fm5xf"] Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.463769 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-649d45d4d9-fm5xf"] Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.471557 4711 scope.go:117] "RemoveContainer" containerID="126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.473000 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6788c646d7-4kxrg"] Dec 05 12:30:37 crc kubenswrapper[4711]: E1205 12:30:37.473611 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004\": container with ID starting with 126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004 not found: ID does not exist" containerID="126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.473789 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004"} err="failed to get container status \"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004\": rpc error: code = NotFound desc = could not find container \"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004\": container with ID starting with 126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004 not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.474466 4711 scope.go:117] "RemoveContainer" containerID="79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf" Dec 05 12:30:37 crc kubenswrapper[4711]: E1205 12:30:37.474952 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf\": container with ID starting with 79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf not found: ID does not exist" containerID="79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.475005 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf"} err="failed to get container status \"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf\": rpc error: code = NotFound desc = could not find container \"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf\": container with ID starting with 79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.475038 4711 scope.go:117] "RemoveContainer" containerID="126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.475330 4711 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004"} err="failed to get container status \"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004\": rpc error: code = NotFound desc = could not find container \"126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004\": container with ID starting with 126cc0bb33ffedabf79702095530d66f6c08c0773b49a5b11b5e6376673ec004 not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.475356 4711 scope.go:117] "RemoveContainer" containerID="79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.475740 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf"} err="failed to get container status \"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf\": rpc error: code = NotFound desc = could not find container \"79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf\": container with ID starting with 79956169a5aa1adcb0fa6b73dc70c9b53cdb23c9621f29b81b10e9983c84ddbf not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.475848 4711 scope.go:117] "RemoveContainer" containerID="54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.481224 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6788c646d7-4kxrg"] Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.645227 4711 scope.go:117] "RemoveContainer" containerID="18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.660531 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6b64c669fc-xv225" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.682103 4711 scope.go:117] "RemoveContainer" containerID="54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249" Dec 05 12:30:37 crc kubenswrapper[4711]: E1205 12:30:37.700905 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249\": container with ID starting with 54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249 not found: ID does not exist" containerID="54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.700969 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249"} err="failed to get container status \"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249\": rpc error: code = NotFound desc = could not find container \"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249\": container with ID starting with 54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249 not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.701004 4711 scope.go:117] "RemoveContainer" containerID="18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a" Dec 05 12:30:37 crc kubenswrapper[4711]: E1205 12:30:37.702723 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a\": container with ID starting with 18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a not found: ID does not exist" containerID="18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.702770 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a"} err="failed to get container status \"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a\": rpc error: code = NotFound desc = could not find container \"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a\": container with ID starting with 18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.702799 4711 scope.go:117] "RemoveContainer" containerID="54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.715919 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249"} err="failed to get container status \"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249\": rpc error: code = NotFound desc = could not find container \"54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249\": container with ID starting with 54082eb981af3425b1c66f577435ea66a20fc6a82c8ea9d817455c6470629249 not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.715995 4711 scope.go:117] "RemoveContainer" containerID="18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.729566 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a"} err="failed to get container status \"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a\": rpc error: code = NotFound desc = could not find container \"18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a\": container with ID starting with 18f5d5d49e61fae546de0f4e3cacaf72a28ea02dc57fca326811610ce265af3a not found: ID does not exist" Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.760597 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-555488f586-gdlch"] Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.760917 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-555488f586-gdlch" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerName="neutron-api" containerID="cri-o://f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2" gracePeriod=30 Dec 05 12:30:37 crc kubenswrapper[4711]: I1205 12:30:37.761090 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-555488f586-gdlch" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerName="neutron-httpd" containerID="cri-o://e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246" gracePeriod=30 Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.069675 4711 generic.go:334] "Generic (PLEG): container finished" podID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerID="9235ed2ac35f39d072b75d4a15a0434260525cdfdb0838e2d3c5a87670f1dce9" exitCode=2 Dec 05 
12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.069726 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a059dfe5-97d3-412e-b70b-430bd3ab92b9","Type":"ContainerStarted","Data":"c129a0798110b72322c603984a291548de03fe231fb2d3c5c2ef0b8c8ca0577a"} Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.069746 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a059dfe5-97d3-412e-b70b-430bd3ab92b9","Type":"ContainerDied","Data":"9235ed2ac35f39d072b75d4a15a0434260525cdfdb0838e2d3c5a87670f1dce9"} Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.073600 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerStarted","Data":"7c9787731bc32175de19ee5ab017cbe5383840f61fbb915edbfb4e49a571c912"} Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.077015 4711 generic.go:334] "Generic (PLEG): container finished" podID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerID="e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246" exitCode=0 Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.077109 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-555488f586-gdlch" event={"ID":"76c7c0ee-1c4f-409f-9706-663ff94af792","Type":"ContainerDied","Data":"e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246"} Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.336423 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.348306 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.452566 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-7sl8r" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.514423 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-config-data\") pod \"89800be3-c463-4e1e-b92a-abb613b5bf5e\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.514522 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-combined-ca-bundle\") pod \"89800be3-c463-4e1e-b92a-abb613b5bf5e\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.514557 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lds9g\" (UniqueName: \"kubernetes.io/projected/89800be3-c463-4e1e-b92a-abb613b5bf5e-kube-api-access-lds9g\") pod \"89800be3-c463-4e1e-b92a-abb613b5bf5e\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.514622 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-scripts\") pod \"89800be3-c463-4e1e-b92a-abb613b5bf5e\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.514689 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89800be3-c463-4e1e-b92a-abb613b5bf5e-logs\") pod \"89800be3-c463-4e1e-b92a-abb613b5bf5e\" (UID: \"89800be3-c463-4e1e-b92a-abb613b5bf5e\") " Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.515263 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89800be3-c463-4e1e-b92a-abb613b5bf5e-logs" (OuterVolumeSpecName: "logs") pod "89800be3-c463-4e1e-b92a-abb613b5bf5e" (UID: "89800be3-c463-4e1e-b92a-abb613b5bf5e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.533632 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-scripts" (OuterVolumeSpecName: "scripts") pod "89800be3-c463-4e1e-b92a-abb613b5bf5e" (UID: "89800be3-c463-4e1e-b92a-abb613b5bf5e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.534659 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89800be3-c463-4e1e-b92a-abb613b5bf5e-kube-api-access-lds9g" (OuterVolumeSpecName: "kube-api-access-lds9g") pod "89800be3-c463-4e1e-b92a-abb613b5bf5e" (UID: "89800be3-c463-4e1e-b92a-abb613b5bf5e"). InnerVolumeSpecName "kube-api-access-lds9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.556445 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89800be3-c463-4e1e-b92a-abb613b5bf5e" (UID: "89800be3-c463-4e1e-b92a-abb613b5bf5e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.589468 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-config-data" (OuterVolumeSpecName: "config-data") pod "89800be3-c463-4e1e-b92a-abb613b5bf5e" (UID: "89800be3-c463-4e1e-b92a-abb613b5bf5e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.616624 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.616668 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.616683 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lds9g\" (UniqueName: \"kubernetes.io/projected/89800be3-c463-4e1e-b92a-abb613b5bf5e-kube-api-access-lds9g\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.616696 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89800be3-c463-4e1e-b92a-abb613b5bf5e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.616708 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89800be3-c463-4e1e-b92a-abb613b5bf5e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.696877 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="442afda2-58a4-42e0-8793-08854bf5a587" path="/var/lib/kubelet/pods/442afda2-58a4-42e0-8793-08854bf5a587/volumes" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.697785 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" path="/var/lib/kubelet/pods/5f5e58bb-1049-4d61-b78a-a192ae4fe61a/volumes" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.698521 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" path="/var/lib/kubelet/pods/f955aa55-498a-457a-8bf3-9214f5751e47/volumes" Dec 05 12:30:38 crc kubenswrapper[4711]: I1205 12:30:38.905876 4711 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pode88e06c8-ba12-4a4a-b88d-726fc1c0925a"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pode88e06c8-ba12-4a4a-b88d-726fc1c0925a] : Timed out while waiting for systemd to remove kubepods-burstable-pode88e06c8_ba12_4a4a_b88d_726fc1c0925a.slice" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.108031 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-7sl8r" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.108707 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7sl8r" event={"ID":"89800be3-c463-4e1e-b92a-abb613b5bf5e","Type":"ContainerDied","Data":"6a63629e01f86610645e2e3802d89142801dcafdc0fdb08c3d0acbc34b473d08"} Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.108728 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a63629e01f86610645e2e3802d89142801dcafdc0fdb08c3d0acbc34b473d08" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.152803 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.182452 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7d94746d9d-m4k6w"] Dec 05 12:30:39 crc kubenswrapper[4711]: E1205 12:30:39.182916 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.182933 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: E1205 12:30:39.182952 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="442afda2-58a4-42e0-8793-08854bf5a587" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.182959 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="442afda2-58a4-42e0-8793-08854bf5a587" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: E1205 12:30:39.182986 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89800be3-c463-4e1e-b92a-abb613b5bf5e" containerName="placement-db-sync" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.182993 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="89800be3-c463-4e1e-b92a-abb613b5bf5e" containerName="placement-db-sync" Dec 05 12:30:39 crc kubenswrapper[4711]: E1205 12:30:39.183004 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="442afda2-58a4-42e0-8793-08854bf5a587" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183011 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="442afda2-58a4-42e0-8793-08854bf5a587" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: E1205 12:30:39.183032 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183039 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: E1205 12:30:39.183047 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183054 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: E1205 12:30:39.183064 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183070 4711 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183256 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183276 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="442afda2-58a4-42e0-8793-08854bf5a587" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183286 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerName="horizon-log" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183297 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f955aa55-498a-457a-8bf3-9214f5751e47" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183313 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="89800be3-c463-4e1e-b92a-abb613b5bf5e" containerName="placement-db-sync" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183341 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f5e58bb-1049-4d61-b78a-a192ae4fe61a" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.183353 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="442afda2-58a4-42e0-8793-08854bf5a587" containerName="horizon" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.185755 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.193847 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.194029 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.194083 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-4fxgp" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.194285 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.195494 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.227754 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-internal-tls-certs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.227855 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-config-data\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.227901 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-public-tls-certs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.228106 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1aa31398-4345-4fca-bbe3-4682d082c3d7-logs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.228166 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-combined-ca-bundle\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.228222 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-scripts\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.228247 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65l8s\" (UniqueName: \"kubernetes.io/projected/1aa31398-4345-4fca-bbe3-4682d082c3d7-kube-api-access-65l8s\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.242594 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7d94746d9d-m4k6w"] Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.329375 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-internal-tls-certs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.329445 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-config-data\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.329476 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-public-tls-certs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.329572 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1aa31398-4345-4fca-bbe3-4682d082c3d7-logs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.329607 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-combined-ca-bundle\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.329633 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-scripts\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.329651 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65l8s\" (UniqueName: \"kubernetes.io/projected/1aa31398-4345-4fca-bbe3-4682d082c3d7-kube-api-access-65l8s\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.330273 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1aa31398-4345-4fca-bbe3-4682d082c3d7-logs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.335343 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-combined-ca-bundle\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.337834 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-public-tls-certs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.337952 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-internal-tls-certs\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.337964 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-config-data\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.338773 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1aa31398-4345-4fca-bbe3-4682d082c3d7-scripts\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.356117 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65l8s\" (UniqueName: 
\"kubernetes.io/projected/1aa31398-4345-4fca-bbe3-4682d082c3d7-kube-api-access-65l8s\") pod \"placement-7d94746d9d-m4k6w\" (UID: \"1aa31398-4345-4fca-bbe3-4682d082c3d7\") " pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.521585 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:39 crc kubenswrapper[4711]: I1205 12:30:39.973175 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7d94746d9d-m4k6w"] Dec 05 12:30:39 crc kubenswrapper[4711]: W1205 12:30:39.992080 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1aa31398_4345_4fca_bbe3_4682d082c3d7.slice/crio-6869bee1a71e041eef1d5b374b4ab0113a91f2c41c91706f3f3b84bfdcb9fd16 WatchSource:0}: Error finding container 6869bee1a71e041eef1d5b374b4ab0113a91f2c41c91706f3f3b84bfdcb9fd16: Status 404 returned error can't find the container with id 6869bee1a71e041eef1d5b374b4ab0113a91f2c41c91706f3f3b84bfdcb9fd16 Dec 05 12:30:40 crc kubenswrapper[4711]: I1205 12:30:40.120270 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d94746d9d-m4k6w" event={"ID":"1aa31398-4345-4fca-bbe3-4682d082c3d7","Type":"ContainerStarted","Data":"6869bee1a71e041eef1d5b374b4ab0113a91f2c41c91706f3f3b84bfdcb9fd16"} Dec 05 12:30:40 crc kubenswrapper[4711]: I1205 12:30:40.846477 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:30:40 crc kubenswrapper[4711]: I1205 12:30:40.850136 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:40 crc kubenswrapper[4711]: I1205 12:30:40.869582 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:30:40 crc kubenswrapper[4711]: I1205 12:30:40.886058 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.090232 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.132034 4711 generic.go:334] "Generic (PLEG): container finished" podID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerID="f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2" exitCode=0 Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.132103 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-555488f586-gdlch" event={"ID":"76c7c0ee-1c4f-409f-9706-663ff94af792","Type":"ContainerDied","Data":"f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2"} Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.132130 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-555488f586-gdlch" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.132144 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-555488f586-gdlch" event={"ID":"76c7c0ee-1c4f-409f-9706-663ff94af792","Type":"ContainerDied","Data":"9741b05d9c6690a7e57ec0feb5083fb72c18d1f5882002e89fb192fb0ef6a0af"} Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.132163 4711 scope.go:117] "RemoveContainer" containerID="e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.139469 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d94746d9d-m4k6w" event={"ID":"1aa31398-4345-4fca-bbe3-4682d082c3d7","Type":"ContainerStarted","Data":"46bbb49b6f4e0c4248e658114995308a5ffcd927b35bef3264e17cacf835a6ed"} Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.139516 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d94746d9d-m4k6w" event={"ID":"1aa31398-4345-4fca-bbe3-4682d082c3d7","Type":"ContainerStarted","Data":"4629051f570c29bdc37486c02aee60cfc35e02008f99a10c47a2b73ae8627c11"} Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.139538 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.139551 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.139934 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.155042 4711 scope.go:117] "RemoveContainer" containerID="f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.164259 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94zrd\" (UniqueName: \"kubernetes.io/projected/76c7c0ee-1c4f-409f-9706-663ff94af792-kube-api-access-94zrd\") pod \"76c7c0ee-1c4f-409f-9706-663ff94af792\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.164407 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-ovndb-tls-certs\") pod \"76c7c0ee-1c4f-409f-9706-663ff94af792\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.164527 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-httpd-config\") pod \"76c7c0ee-1c4f-409f-9706-663ff94af792\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.164564 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-config\") pod \"76c7c0ee-1c4f-409f-9706-663ff94af792\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.164595 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-combined-ca-bundle\") pod 
\"76c7c0ee-1c4f-409f-9706-663ff94af792\" (UID: \"76c7c0ee-1c4f-409f-9706-663ff94af792\") " Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.171522 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7d94746d9d-m4k6w" podStartSLOduration=2.171497225 podStartE2EDuration="2.171497225s" podCreationTimestamp="2025-12-05 12:30:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:41.166357299 +0000 UTC m=+1286.750679629" watchObservedRunningTime="2025-12-05 12:30:41.171497225 +0000 UTC m=+1286.755819555" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.176312 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "76c7c0ee-1c4f-409f-9706-663ff94af792" (UID: "76c7c0ee-1c4f-409f-9706-663ff94af792"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.176362 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76c7c0ee-1c4f-409f-9706-663ff94af792-kube-api-access-94zrd" (OuterVolumeSpecName: "kube-api-access-94zrd") pod "76c7c0ee-1c4f-409f-9706-663ff94af792" (UID: "76c7c0ee-1c4f-409f-9706-663ff94af792"). InnerVolumeSpecName "kube-api-access-94zrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.177540 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.191623 4711 scope.go:117] "RemoveContainer" containerID="e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246" Dec 05 12:30:41 crc kubenswrapper[4711]: E1205 12:30:41.192141 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246\": container with ID starting with e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246 not found: ID does not exist" containerID="e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.192175 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246"} err="failed to get container status \"e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246\": rpc error: code = NotFound desc = could not find container \"e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246\": container with ID starting with e891cb99c1786b1bd14cc956eab7d371f6d03c5546d16f33ff9e5e409a202246 not found: ID does not exist" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.192203 4711 scope.go:117] "RemoveContainer" containerID="f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2" Dec 05 12:30:41 crc kubenswrapper[4711]: E1205 12:30:41.196149 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2\": container with ID starting with f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2 not found: ID does not exist" 
containerID="f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.196201 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2"} err="failed to get container status \"f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2\": rpc error: code = NotFound desc = could not find container \"f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2\": container with ID starting with f10d6b8e4675b5e162a72879b7ced7c189725bbc622b06b4e6457b5d215243e2 not found: ID does not exist" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.237311 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76c7c0ee-1c4f-409f-9706-663ff94af792" (UID: "76c7c0ee-1c4f-409f-9706-663ff94af792"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.266448 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94zrd\" (UniqueName: \"kubernetes.io/projected/76c7c0ee-1c4f-409f-9706-663ff94af792-kube-api-access-94zrd\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.266479 4711 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.266492 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.276561 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-config" (OuterVolumeSpecName: "config") pod "76c7c0ee-1c4f-409f-9706-663ff94af792" (UID: "76c7c0ee-1c4f-409f-9706-663ff94af792"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.300093 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "76c7c0ee-1c4f-409f-9706-663ff94af792" (UID: "76c7c0ee-1c4f-409f-9706-663ff94af792"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.369489 4711 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.369528 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/76c7c0ee-1c4f-409f-9706-663ff94af792-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.512412 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-555488f586-gdlch"] Dec 05 12:30:41 crc kubenswrapper[4711]: I1205 12:30:41.522128 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-555488f586-gdlch"] Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.149995 4711 generic.go:334] "Generic (PLEG): container finished" podID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerID="7c9787731bc32175de19ee5ab017cbe5383840f61fbb915edbfb4e49a571c912" exitCode=1 Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.150083 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerDied","Data":"7c9787731bc32175de19ee5ab017cbe5383840f61fbb915edbfb4e49a571c912"} Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.150315 4711 scope.go:117] "RemoveContainer" containerID="2335bde3a171c64d57f7ee19e1c6d22c1e1865ea34f968b4e4aac49bc4b8a90e" Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.150703 4711 scope.go:117] "RemoveContainer" containerID="7c9787731bc32175de19ee5ab017cbe5383840f61fbb915edbfb4e49a571c912" Dec 05 12:30:42 crc kubenswrapper[4711]: E1205 12:30:42.150972 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.154645 4711 generic.go:334] "Generic (PLEG): container finished" podID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerID="454c5c2c8669dfd6e10ce7d7bb01e3db1fc9bd864e5c7e05ed5b825fbe3bed20" exitCode=0 Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.155462 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a059dfe5-97d3-412e-b70b-430bd3ab92b9","Type":"ContainerDied","Data":"454c5c2c8669dfd6e10ce7d7bb01e3db1fc9bd864e5c7e05ed5b825fbe3bed20"} Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.694583 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" path="/var/lib/kubelet/pods/76c7c0ee-1c4f-409f-9706-663ff94af792/volumes" Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.787561 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.889190 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6856c6c898-9lzvt" Dec 05 12:30:42 crc kubenswrapper[4711]: I1205 12:30:42.947046 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/horizon-744fd5f788-bs9bc"] Dec 05 12:30:43 crc kubenswrapper[4711]: I1205 12:30:43.165150 4711 generic.go:334] "Generic (PLEG): container finished" podID="ddff9a4d-a020-4de4-a114-694bec9908f9" containerID="c3d7238ac1459cfd25423978aa6be3db4a7dc0de917f4e2c4b2f8ff6306c34a5" exitCode=0 Dec 05 12:30:43 crc kubenswrapper[4711]: I1205 12:30:43.165230 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-sm4bg" event={"ID":"ddff9a4d-a020-4de4-a114-694bec9908f9","Type":"ContainerDied","Data":"c3d7238ac1459cfd25423978aa6be3db4a7dc0de917f4e2c4b2f8ff6306c34a5"} Dec 05 12:30:43 crc kubenswrapper[4711]: I1205 12:30:43.167647 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-744fd5f788-bs9bc" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon-log" containerID="cri-o://a8b2276036c2d8555cc86ef88f650eefe07004dceb19cb735a3726cc375238af" gracePeriod=30 Dec 05 12:30:43 crc kubenswrapper[4711]: I1205 12:30:43.167691 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-744fd5f788-bs9bc" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon" containerID="cri-o://d8992c9aa5235fde93e9e62efbeb7b298a387f545314aa2553dcff5475ec8e93" gracePeriod=30 Dec 05 12:30:43 crc kubenswrapper[4711]: I1205 12:30:43.168289 4711 scope.go:117] "RemoveContainer" containerID="7c9787731bc32175de19ee5ab017cbe5383840f61fbb915edbfb4e49a571c912" Dec 05 12:30:43 crc kubenswrapper[4711]: E1205 12:30:43.168570 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.653865 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.728191 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wpzd\" (UniqueName: \"kubernetes.io/projected/ddff9a4d-a020-4de4-a114-694bec9908f9-kube-api-access-2wpzd\") pod \"ddff9a4d-a020-4de4-a114-694bec9908f9\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.728296 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-combined-ca-bundle\") pod \"ddff9a4d-a020-4de4-a114-694bec9908f9\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.728432 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-db-sync-config-data\") pod \"ddff9a4d-a020-4de4-a114-694bec9908f9\" (UID: \"ddff9a4d-a020-4de4-a114-694bec9908f9\") " Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.734080 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ddff9a4d-a020-4de4-a114-694bec9908f9" (UID: "ddff9a4d-a020-4de4-a114-694bec9908f9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.740661 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddff9a4d-a020-4de4-a114-694bec9908f9-kube-api-access-2wpzd" (OuterVolumeSpecName: "kube-api-access-2wpzd") pod "ddff9a4d-a020-4de4-a114-694bec9908f9" (UID: "ddff9a4d-a020-4de4-a114-694bec9908f9"). InnerVolumeSpecName "kube-api-access-2wpzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.761664 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ddff9a4d-a020-4de4-a114-694bec9908f9" (UID: "ddff9a4d-a020-4de4-a114-694bec9908f9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.831197 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.831239 4711 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddff9a4d-a020-4de4-a114-694bec9908f9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:44 crc kubenswrapper[4711]: I1205 12:30:44.831249 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wpzd\" (UniqueName: \"kubernetes.io/projected/ddff9a4d-a020-4de4-a114-694bec9908f9-kube-api-access-2wpzd\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.184829 4711 generic.go:334] "Generic (PLEG): container finished" podID="53844e89-65d7-4cbc-b375-dbfef360857b" containerID="d8992c9aa5235fde93e9e62efbeb7b298a387f545314aa2553dcff5475ec8e93" exitCode=0 Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.184886 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-744fd5f788-bs9bc" event={"ID":"53844e89-65d7-4cbc-b375-dbfef360857b","Type":"ContainerDied","Data":"d8992c9aa5235fde93e9e62efbeb7b298a387f545314aa2553dcff5475ec8e93"} Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.186169 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-sm4bg" event={"ID":"ddff9a4d-a020-4de4-a114-694bec9908f9","Type":"ContainerDied","Data":"33008daecc79f4777cf9131251960b5e280a934dd75dc5cae4d1370495e62487"} Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.186199 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33008daecc79f4777cf9131251960b5e280a934dd75dc5cae4d1370495e62487" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.186243 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-sm4bg" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.381097 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-65744cb55-qz24w"] Dec 05 12:30:45 crc kubenswrapper[4711]: E1205 12:30:45.381865 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerName="neutron-httpd" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.381883 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerName="neutron-httpd" Dec 05 12:30:45 crc kubenswrapper[4711]: E1205 12:30:45.381924 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerName="neutron-api" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.381931 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerName="neutron-api" Dec 05 12:30:45 crc kubenswrapper[4711]: E1205 12:30:45.381950 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddff9a4d-a020-4de4-a114-694bec9908f9" containerName="barbican-db-sync" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.381957 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddff9a4d-a020-4de4-a114-694bec9908f9" containerName="barbican-db-sync" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.382166 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddff9a4d-a020-4de4-a114-694bec9908f9" containerName="barbican-db-sync" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.382181 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerName="neutron-httpd" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.382188 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="76c7c0ee-1c4f-409f-9706-663ff94af792" containerName="neutron-api" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.383499 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.387352 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2ckjg" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.389179 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.389455 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.420111 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-65744cb55-qz24w"] Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.464548 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-combined-ca-bundle\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.464645 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-config-data\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.464678 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-config-data-custom\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.464762 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcbrj\" (UniqueName: \"kubernetes.io/projected/483e0862-32c8-445a-a8b5-745f71c7cb3f-kube-api-access-pcbrj\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.464955 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/483e0862-32c8-445a-a8b5-745f71c7cb3f-logs\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.486450 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5467d6b846-6zlkw"] Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.487927 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.491884 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.513850 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5467d6b846-6zlkw"] Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.544916 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7896bb789c-hpqcm"] Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.546930 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.567689 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcbrj\" (UniqueName: \"kubernetes.io/projected/483e0862-32c8-445a-a8b5-745f71c7cb3f-kube-api-access-pcbrj\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.567799 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/483e0862-32c8-445a-a8b5-745f71c7cb3f-logs\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.567859 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-combined-ca-bundle\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.567917 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-config-data-custom\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.567954 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-config-data\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.567979 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-combined-ca-bundle\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.568019 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qccr\" (UniqueName: 
\"kubernetes.io/projected/83f5120f-4476-4a96-b1d9-238db3564735-kube-api-access-9qccr\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.568062 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-config-data\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.568083 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-config-data-custom\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.568109 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83f5120f-4476-4a96-b1d9-238db3564735-logs\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.568841 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/483e0862-32c8-445a-a8b5-745f71c7cb3f-logs\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.578118 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-config-data\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.578978 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-combined-ca-bundle\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.580898 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7896bb789c-hpqcm"] Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.585694 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/483e0862-32c8-445a-a8b5-745f71c7cb3f-config-data-custom\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.600346 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcbrj\" (UniqueName: \"kubernetes.io/projected/483e0862-32c8-445a-a8b5-745f71c7cb3f-kube-api-access-pcbrj\") pod \"barbican-worker-65744cb55-qz24w\" (UID: \"483e0862-32c8-445a-a8b5-745f71c7cb3f\") " pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 
12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.669793 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-sb\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.671128 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mncw\" (UniqueName: \"kubernetes.io/projected/256e8055-6879-4b2e-ae25-3603d2546b91-kube-api-access-2mncw\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.671293 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-config\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.671453 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-swift-storage-0\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.671859 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-svc\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.672134 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-combined-ca-bundle\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.672201 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-nb\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.672341 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-config-data-custom\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.672460 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-config-data\") pod 
\"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.672541 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qccr\" (UniqueName: \"kubernetes.io/projected/83f5120f-4476-4a96-b1d9-238db3564735-kube-api-access-9qccr\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.672780 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83f5120f-4476-4a96-b1d9-238db3564735-logs\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.673153 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83f5120f-4476-4a96-b1d9-238db3564735-logs\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.678855 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-config-data-custom\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.680082 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-combined-ca-bundle\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.685473 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83f5120f-4476-4a96-b1d9-238db3564735-config-data\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.695749 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-547b54f9fb-c79cw"] Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.697243 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.700757 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.701028 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-65744cb55-qz24w" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.707769 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qccr\" (UniqueName: \"kubernetes.io/projected/83f5120f-4476-4a96-b1d9-238db3564735-kube-api-access-9qccr\") pod \"barbican-keystone-listener-5467d6b846-6zlkw\" (UID: \"83f5120f-4476-4a96-b1d9-238db3564735\") " pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.718749 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-547b54f9fb-c79cw"] Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.776687 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-sb\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.776742 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mncw\" (UniqueName: \"kubernetes.io/projected/256e8055-6879-4b2e-ae25-3603d2546b91-kube-api-access-2mncw\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.776785 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-config\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.776813 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-swift-storage-0\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.776855 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-svc\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.776922 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzbn2\" (UniqueName: \"kubernetes.io/projected/19a491f3-df0d-4517-8299-408d5c3cb6be-kube-api-access-bzbn2\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.776946 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-nb\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.777009 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data-custom\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.777041 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.777083 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-combined-ca-bundle\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.777117 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19a491f3-df0d-4517-8299-408d5c3cb6be-logs\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.778599 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-sb\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.779605 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-config\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.780224 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-swift-storage-0\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.780688 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-nb\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.781089 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-svc\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.801222 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2mncw\" (UniqueName: \"kubernetes.io/projected/256e8055-6879-4b2e-ae25-3603d2546b91-kube-api-access-2mncw\") pod \"dnsmasq-dns-7896bb789c-hpqcm\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.808851 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.867027 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.879221 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data-custom\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.879277 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.879328 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-combined-ca-bundle\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.879363 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19a491f3-df0d-4517-8299-408d5c3cb6be-logs\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.879694 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzbn2\" (UniqueName: \"kubernetes.io/projected/19a491f3-df0d-4517-8299-408d5c3cb6be-kube-api-access-bzbn2\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.887499 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19a491f3-df0d-4517-8299-408d5c3cb6be-logs\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.888413 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data-custom\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.898916 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-combined-ca-bundle\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.905378 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzbn2\" (UniqueName: \"kubernetes.io/projected/19a491f3-df0d-4517-8299-408d5c3cb6be-kube-api-access-bzbn2\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:45 crc kubenswrapper[4711]: I1205 12:30:45.908494 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data\") pod \"barbican-api-547b54f9fb-c79cw\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:46 crc kubenswrapper[4711]: I1205 12:30:46.148947 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:46 crc kubenswrapper[4711]: I1205 12:30:46.201717 4711 generic.go:334] "Generic (PLEG): container finished" podID="da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" containerID="039647fb3cedc13b9bd16efffa1e1a5553e27cbcbadb9b7a8b3340cbd5e09621" exitCode=0 Dec 05 12:30:46 crc kubenswrapper[4711]: I1205 12:30:46.201768 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-j6vdf" event={"ID":"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661","Type":"ContainerDied","Data":"039647fb3cedc13b9bd16efffa1e1a5553e27cbcbadb9b7a8b3340cbd5e09621"} Dec 05 12:30:46 crc kubenswrapper[4711]: I1205 12:30:46.237222 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-65744cb55-qz24w"] Dec 05 12:30:46 crc kubenswrapper[4711]: I1205 12:30:46.260250 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:30:46 crc kubenswrapper[4711]: W1205 12:30:46.363732 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83f5120f_4476_4a96_b1d9_238db3564735.slice/crio-5cca97436a5cf1ade6b5e98f034269fbff81b32229b0034851f028622970c67a WatchSource:0}: Error finding container 5cca97436a5cf1ade6b5e98f034269fbff81b32229b0034851f028622970c67a: Status 404 returned error can't find the container with id 5cca97436a5cf1ade6b5e98f034269fbff81b32229b0034851f028622970c67a Dec 05 12:30:46 crc kubenswrapper[4711]: I1205 12:30:46.374736 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5467d6b846-6zlkw"] Dec 05 12:30:46 crc kubenswrapper[4711]: I1205 12:30:46.384791 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7896bb789c-hpqcm"] Dec 05 12:30:46 crc kubenswrapper[4711]: I1205 12:30:46.479498 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-547b54f9fb-c79cw"] Dec 05 12:30:46 crc kubenswrapper[4711]: W1205 12:30:46.495989 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19a491f3_df0d_4517_8299_408d5c3cb6be.slice/crio-fea785e110d9f80ad284d4e6079e8208ae542f3900c917da6502d7838d757eac WatchSource:0}: Error finding container fea785e110d9f80ad284d4e6079e8208ae542f3900c917da6502d7838d757eac: Status 404 returned error 
can't find the container with id fea785e110d9f80ad284d4e6079e8208ae542f3900c917da6502d7838d757eac Dec 05 12:30:47 crc kubenswrapper[4711]: E1205 12:30:47.083779 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f496d6f_8883_493b_866b_117f6b7537e4.slice/crio-52c3df0f2f8b4be4f732bafb25b134cd6ceaaf9ed61888a915be10930b7a2042\": RecentStats: unable to find data in memory cache]" Dec 05 12:30:47 crc kubenswrapper[4711]: I1205 12:30:47.212187 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" event={"ID":"83f5120f-4476-4a96-b1d9-238db3564735","Type":"ContainerStarted","Data":"5cca97436a5cf1ade6b5e98f034269fbff81b32229b0034851f028622970c67a"} Dec 05 12:30:47 crc kubenswrapper[4711]: I1205 12:30:47.213726 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65744cb55-qz24w" event={"ID":"483e0862-32c8-445a-a8b5-745f71c7cb3f","Type":"ContainerStarted","Data":"97fbcff44d9c8c452f1893e49a01c6f935bf6af5486b595e1097503446867fbe"} Dec 05 12:30:47 crc kubenswrapper[4711]: I1205 12:30:47.214980 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" event={"ID":"256e8055-6879-4b2e-ae25-3603d2546b91","Type":"ContainerStarted","Data":"ed6d82c250dffd35867bc7965b6d52327542cec6b6b0d082d2dec57484f57402"} Dec 05 12:30:47 crc kubenswrapper[4711]: I1205 12:30:47.216848 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547b54f9fb-c79cw" event={"ID":"19a491f3-df0d-4517-8299-408d5c3cb6be","Type":"ContainerStarted","Data":"fea785e110d9f80ad284d4e6079e8208ae542f3900c917da6502d7838d757eac"} Dec 05 12:30:47 crc kubenswrapper[4711]: I1205 12:30:47.607192 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-744fd5f788-bs9bc" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.156:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.156:8443: connect: connection refused" Dec 05 12:30:47 crc kubenswrapper[4711]: I1205 12:30:47.898224 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.056162 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-db-sync-config-data\") pod \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.056282 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-scripts\") pod \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.056318 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7spsq\" (UniqueName: \"kubernetes.io/projected/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-kube-api-access-7spsq\") pod \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.056379 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-config-data\") pod \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.056501 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-etc-machine-id\") pod \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.056693 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-combined-ca-bundle\") pod \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\" (UID: \"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661\") " Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.060178 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" (UID: "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.061923 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-scripts" (OuterVolumeSpecName: "scripts") pod "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" (UID: "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.062505 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" (UID: "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.064670 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-kube-api-access-7spsq" (OuterVolumeSpecName: "kube-api-access-7spsq") pod "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" (UID: "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661"). InnerVolumeSpecName "kube-api-access-7spsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.123129 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" (UID: "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.139274 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-config-data" (OuterVolumeSpecName: "config-data") pod "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" (UID: "da5d7bb4-71d2-458f-aabf-4cb2ed2f4661"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.158813 4711 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.158858 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.158872 4711 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.158899 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.158914 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7spsq\" (UniqueName: \"kubernetes.io/projected/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-kube-api-access-7spsq\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.158929 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.226379 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547b54f9fb-c79cw" event={"ID":"19a491f3-df0d-4517-8299-408d5c3cb6be","Type":"ContainerStarted","Data":"68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6"} Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.228133 4711 generic.go:334] "Generic (PLEG): container finished" podID="256e8055-6879-4b2e-ae25-3603d2546b91" 
containerID="6cdeee827948dd7e8f94362c9b454e0150d5a06e230c371e7dec0fe77f76b54b" exitCode=0 Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.228169 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" event={"ID":"256e8055-6879-4b2e-ae25-3603d2546b91","Type":"ContainerDied","Data":"6cdeee827948dd7e8f94362c9b454e0150d5a06e230c371e7dec0fe77f76b54b"} Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.229889 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-j6vdf" event={"ID":"da5d7bb4-71d2-458f-aabf-4cb2ed2f4661","Type":"ContainerDied","Data":"9cac137ba8502b5df95f1fef354db36a05ded81f14fbf84694f875a792b5f807"} Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.229922 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cac137ba8502b5df95f1fef354db36a05ded81f14fbf84694f875a792b5f807" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.229938 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-j6vdf" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.302201 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.302250 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.574321 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:30:48 crc kubenswrapper[4711]: E1205 12:30:48.575132 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" containerName="cinder-db-sync" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.575151 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" containerName="cinder-db-sync" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.575407 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" containerName="cinder-db-sync" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.576554 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.587573 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-j6gqm" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.588349 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.588446 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.588748 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.613483 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.660849 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7896bb789c-hpqcm"] Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.687778 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf2h6\" (UniqueName: \"kubernetes.io/projected/d348b707-8d7c-4cbe-94d6-63a20efc6e54-kube-api-access-qf2h6\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.687823 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-scripts\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.687867 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.687901 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.687940 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d348b707-8d7c-4cbe-94d6-63a20efc6e54-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.687987 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.739222 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554cdf56f-dk7v5"] Dec 05 12:30:48 crc 
kubenswrapper[4711]: I1205 12:30:48.742069 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554cdf56f-dk7v5"] Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.742166 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.789081 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf2h6\" (UniqueName: \"kubernetes.io/projected/d348b707-8d7c-4cbe-94d6-63a20efc6e54-kube-api-access-qf2h6\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.789120 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-scripts\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.789155 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.789180 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.789211 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d348b707-8d7c-4cbe-94d6-63a20efc6e54-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.789259 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.792172 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.794254 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.794929 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d348b707-8d7c-4cbe-94d6-63a20efc6e54-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.800153 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.809743 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.809901 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.812947 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-scripts\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.818074 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf2h6\" (UniqueName: \"kubernetes.io/projected/d348b707-8d7c-4cbe-94d6-63a20efc6e54-kube-api-access-qf2h6\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.837707 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data\") pod \"cinder-scheduler-0\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.846981 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.851448 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891615 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891699 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04d0a904-080f-4fc8-b44f-29d29693847d-logs\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 
05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891735 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data-custom\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891793 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-swift-storage-0\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891816 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-svc\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891833 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891849 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04d0a904-080f-4fc8-b44f-29d29693847d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891870 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r88l\" (UniqueName: \"kubernetes.io/projected/04d0a904-080f-4fc8-b44f-29d29693847d-kube-api-access-6r88l\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891887 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-scripts\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891903 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-config\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891923 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxhwd\" (UniqueName: \"kubernetes.io/projected/707d9a65-b0d5-426c-bc51-d9c14a954afa-kube-api-access-zxhwd\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: 
I1205 12:30:48.891953 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-nb\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.891973 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-sb\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.909954 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995634 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-swift-storage-0\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995705 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-svc\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995735 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995760 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04d0a904-080f-4fc8-b44f-29d29693847d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995797 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r88l\" (UniqueName: \"kubernetes.io/projected/04d0a904-080f-4fc8-b44f-29d29693847d-kube-api-access-6r88l\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995822 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-scripts\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995847 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-config\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc 
kubenswrapper[4711]: I1205 12:30:48.995881 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxhwd\" (UniqueName: \"kubernetes.io/projected/707d9a65-b0d5-426c-bc51-d9c14a954afa-kube-api-access-zxhwd\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995928 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-nb\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.995964 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-sb\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.996010 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.996084 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04d0a904-080f-4fc8-b44f-29d29693847d-logs\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:48 crc kubenswrapper[4711]: I1205 12:30:48.996122 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data-custom\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.001283 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-swift-storage-0\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.001861 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-nb\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.002437 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-config\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.002918 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-scripts\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.002992 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04d0a904-080f-4fc8-b44f-29d29693847d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.005646 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-sb\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.005867 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04d0a904-080f-4fc8-b44f-29d29693847d-logs\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.011085 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data-custom\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.011652 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-svc\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.014292 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.015670 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.026108 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxhwd\" (UniqueName: \"kubernetes.io/projected/707d9a65-b0d5-426c-bc51-d9c14a954afa-kube-api-access-zxhwd\") pod \"dnsmasq-dns-8554cdf56f-dk7v5\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.036057 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r88l\" (UniqueName: \"kubernetes.io/projected/04d0a904-080f-4fc8-b44f-29d29693847d-kube-api-access-6r88l\") pod \"cinder-api-0\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") " pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.086191 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.210254 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.250229 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5885f55d9b-n67kv"] Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.253590 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.263257 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.263488 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.265774 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5885f55d9b-n67kv"] Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.303329 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547b54f9fb-c79cw" event={"ID":"19a491f3-df0d-4517-8299-408d5c3cb6be","Type":"ContainerStarted","Data":"43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972"} Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.303419 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.303450 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.312245 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" event={"ID":"256e8055-6879-4b2e-ae25-3603d2546b91","Type":"ContainerStarted","Data":"4bb99b0c027471f113ed1a596bfa6040945db8778af22edacf1f3ba44dd48521"} Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.312432 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" podUID="256e8055-6879-4b2e-ae25-3603d2546b91" containerName="dnsmasq-dns" containerID="cri-o://4bb99b0c027471f113ed1a596bfa6040945db8778af22edacf1f3ba44dd48521" gracePeriod=10 Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.312585 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.329614 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-547b54f9fb-c79cw" podStartSLOduration=4.329597408 podStartE2EDuration="4.329597408s" podCreationTimestamp="2025-12-05 12:30:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:49.324917623 +0000 UTC m=+1294.909239973" watchObservedRunningTime="2025-12-05 12:30:49.329597408 +0000 UTC m=+1294.913919738" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.356199 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" podStartSLOduration=4.3561767 podStartE2EDuration="4.3561767s" podCreationTimestamp="2025-12-05 12:30:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:49.352447598 +0000 UTC m=+1294.936769938" watchObservedRunningTime="2025-12-05 12:30:49.3561767 +0000 UTC m=+1294.940499030" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.406644 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvkc6\" (UniqueName: \"kubernetes.io/projected/d605ba82-4b40-4729-a7e7-a038bab81b2b-kube-api-access-cvkc6\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.406713 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-config-data\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.406803 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-combined-ca-bundle\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.406853 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-public-tls-certs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.406891 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d605ba82-4b40-4729-a7e7-a038bab81b2b-logs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.406935 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-config-data-custom\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.407197 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-internal-tls-certs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.511581 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-internal-tls-certs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.511690 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvkc6\" (UniqueName: \"kubernetes.io/projected/d605ba82-4b40-4729-a7e7-a038bab81b2b-kube-api-access-cvkc6\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.511726 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-config-data\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.512610 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-combined-ca-bundle\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.512702 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-public-tls-certs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.512754 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d605ba82-4b40-4729-a7e7-a038bab81b2b-logs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.512822 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-config-data-custom\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.513922 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d605ba82-4b40-4729-a7e7-a038bab81b2b-logs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.516897 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-internal-tls-certs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.517585 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-config-data-custom\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.519672 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-combined-ca-bundle\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.521037 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-config-data\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.521257 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d605ba82-4b40-4729-a7e7-a038bab81b2b-public-tls-certs\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.534465 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvkc6\" (UniqueName: \"kubernetes.io/projected/d605ba82-4b40-4729-a7e7-a038bab81b2b-kube-api-access-cvkc6\") pod \"barbican-api-5885f55d9b-n67kv\" (UID: \"d605ba82-4b40-4729-a7e7-a038bab81b2b\") " pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:49 crc kubenswrapper[4711]: I1205 12:30:49.591437 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.351326 4711 generic.go:334] "Generic (PLEG): container finished" podID="256e8055-6879-4b2e-ae25-3603d2546b91" containerID="4bb99b0c027471f113ed1a596bfa6040945db8778af22edacf1f3ba44dd48521" exitCode=0 Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.351480 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" event={"ID":"256e8055-6879-4b2e-ae25-3603d2546b91","Type":"ContainerDied","Data":"4bb99b0c027471f113ed1a596bfa6040945db8778af22edacf1f3ba44dd48521"} Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.351792 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" event={"ID":"256e8055-6879-4b2e-ae25-3603d2546b91","Type":"ContainerDied","Data":"ed6d82c250dffd35867bc7965b6d52327542cec6b6b0d082d2dec57484f57402"} Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.351809 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed6d82c250dffd35867bc7965b6d52327542cec6b6b0d082d2dec57484f57402" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.358206 4711 generic.go:334] "Generic (PLEG): container finished" podID="5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" containerID="6930b6c7d4ac8c11c92e437f563aa3ea5fa4d8ffbb9a75e0aab2327b4cdd0aa2" exitCode=0 Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.359228 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5gbrt" event={"ID":"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b","Type":"ContainerDied","Data":"6930b6c7d4ac8c11c92e437f563aa3ea5fa4d8ffbb9a75e0aab2327b4cdd0aa2"} Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.421091 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.438235 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mncw\" (UniqueName: \"kubernetes.io/projected/256e8055-6879-4b2e-ae25-3603d2546b91-kube-api-access-2mncw\") pod \"256e8055-6879-4b2e-ae25-3603d2546b91\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.438321 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-config\") pod \"256e8055-6879-4b2e-ae25-3603d2546b91\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.438451 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-sb\") pod \"256e8055-6879-4b2e-ae25-3603d2546b91\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.438607 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-swift-storage-0\") pod \"256e8055-6879-4b2e-ae25-3603d2546b91\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.438656 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-nb\") pod \"256e8055-6879-4b2e-ae25-3603d2546b91\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.438797 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-svc\") pod \"256e8055-6879-4b2e-ae25-3603d2546b91\" (UID: \"256e8055-6879-4b2e-ae25-3603d2546b91\") " Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.468807 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/256e8055-6879-4b2e-ae25-3603d2546b91-kube-api-access-2mncw" (OuterVolumeSpecName: "kube-api-access-2mncw") pod "256e8055-6879-4b2e-ae25-3603d2546b91" (UID: "256e8055-6879-4b2e-ae25-3603d2546b91"). InnerVolumeSpecName "kube-api-access-2mncw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.544353 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mncw\" (UniqueName: \"kubernetes.io/projected/256e8055-6879-4b2e-ae25-3603d2546b91-kube-api-access-2mncw\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.582914 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "256e8055-6879-4b2e-ae25-3603d2546b91" (UID: "256e8055-6879-4b2e-ae25-3603d2546b91"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.648975 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.724540 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "256e8055-6879-4b2e-ae25-3603d2546b91" (UID: "256e8055-6879-4b2e-ae25-3603d2546b91"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.735812 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-config" (OuterVolumeSpecName: "config") pod "256e8055-6879-4b2e-ae25-3603d2546b91" (UID: "256e8055-6879-4b2e-ae25-3603d2546b91"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.758578 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.758617 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.758743 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "256e8055-6879-4b2e-ae25-3603d2546b91" (UID: "256e8055-6879-4b2e-ae25-3603d2546b91"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.783086 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "256e8055-6879-4b2e-ae25-3603d2546b91" (UID: "256e8055-6879-4b2e-ae25-3603d2546b91"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.862916 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.862957 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/256e8055-6879-4b2e-ae25-3603d2546b91-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.952503 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.952551 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.952564 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.953294 4711 scope.go:117] "RemoveContainer" containerID="7c9787731bc32175de19ee5ab017cbe5383840f61fbb915edbfb4e49a571c912" Dec 05 12:30:50 crc kubenswrapper[4711]: E1205 12:30:50.953679 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:30:50 crc kubenswrapper[4711]: I1205 12:30:50.996409 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554cdf56f-dk7v5"] Dec 05 12:30:51 crc kubenswrapper[4711]: W1205 12:30:51.216282 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd605ba82_4b40_4729_a7e7_a038bab81b2b.slice/crio-98ea71bde8ee91b96fa4cd0d531b7260001ecea2e894333380ef2947342f0cac WatchSource:0}: Error finding container 98ea71bde8ee91b96fa4cd0d531b7260001ecea2e894333380ef2947342f0cac: Status 404 returned error can't find the container with id 98ea71bde8ee91b96fa4cd0d531b7260001ecea2e894333380ef2947342f0cac Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.219900 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5885f55d9b-n67kv"] Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.336763 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:30:51 crc kubenswrapper[4711]: W1205 12:30:51.340846 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd348b707_8d7c_4cbe_94d6_63a20efc6e54.slice/crio-02e9cb8a17c35e7a262d6a9f9477dcb7ddfe25d62b1e7b7b2a712499b6a42241 WatchSource:0}: Error finding container 02e9cb8a17c35e7a262d6a9f9477dcb7ddfe25d62b1e7b7b2a712499b6a42241: Status 404 returned error can't find the container with id 02e9cb8a17c35e7a262d6a9f9477dcb7ddfe25d62b1e7b7b2a712499b6a42241 Dec 05 12:30:51 crc kubenswrapper[4711]: W1205 12:30:51.362286 4711 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04d0a904_080f_4fc8_b44f_29d29693847d.slice/crio-0e84da4937d0ab617e9fb2206f8ca18b81627bb8add0be9259def4a9b87b6093 WatchSource:0}: Error finding container 0e84da4937d0ab617e9fb2206f8ca18b81627bb8add0be9259def4a9b87b6093: Status 404 returned error can't find the container with id 0e84da4937d0ab617e9fb2206f8ca18b81627bb8add0be9259def4a9b87b6093 Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.362560 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.369972 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d348b707-8d7c-4cbe-94d6-63a20efc6e54","Type":"ContainerStarted","Data":"02e9cb8a17c35e7a262d6a9f9477dcb7ddfe25d62b1e7b7b2a712499b6a42241"} Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.371892 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" event={"ID":"83f5120f-4476-4a96-b1d9-238db3564735","Type":"ContainerStarted","Data":"853644e26bd46970d6ccef2210f9535922c1312d1526feed7157d6ea677413cf"} Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.373906 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65744cb55-qz24w" event={"ID":"483e0862-32c8-445a-a8b5-745f71c7cb3f","Type":"ContainerStarted","Data":"dea572d5ac48aede63bf6081fe0496a7dfdad7363eaf85c8188609d9052789ce"} Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.374945 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" event={"ID":"707d9a65-b0d5-426c-bc51-d9c14a954afa","Type":"ContainerStarted","Data":"569676fbc0cf2d18487a708b2b8049ceaaae26ae07b876a88dec629e87be8832"} Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.376008 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5885f55d9b-n67kv" event={"ID":"d605ba82-4b40-4729-a7e7-a038bab81b2b","Type":"ContainerStarted","Data":"98ea71bde8ee91b96fa4cd0d531b7260001ecea2e894333380ef2947342f0cac"} Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.376057 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7896bb789c-hpqcm" Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.427126 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7896bb789c-hpqcm"] Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.447805 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7896bb789c-hpqcm"] Dec 05 12:30:51 crc kubenswrapper[4711]: I1205 12:30:51.892499 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5gbrt" Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.017207 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-combined-ca-bundle\") pod \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.017993 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-db-sync-config-data\") pod \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.018028 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msfbn\" (UniqueName: \"kubernetes.io/projected/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-kube-api-access-msfbn\") pod \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.018082 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-config-data\") pod \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\" (UID: \"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b\") " Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.024635 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" (UID: "5e7e345d-f9d1-4c96-9da9-b960d54c7b5b"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.024685 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-kube-api-access-msfbn" (OuterVolumeSpecName: "kube-api-access-msfbn") pod "5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" (UID: "5e7e345d-f9d1-4c96-9da9-b960d54c7b5b"). InnerVolumeSpecName "kube-api-access-msfbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.071741 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" (UID: "5e7e345d-f9d1-4c96-9da9-b960d54c7b5b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.092229 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-config-data" (OuterVolumeSpecName: "config-data") pod "5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" (UID: "5e7e345d-f9d1-4c96-9da9-b960d54c7b5b"). InnerVolumeSpecName "config-data". 
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.120590 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.120623 4711 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.120633 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msfbn\" (UniqueName: \"kubernetes.io/projected/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-kube-api-access-msfbn\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.120645 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.161187 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.385460 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04d0a904-080f-4fc8-b44f-29d29693847d","Type":"ContainerStarted","Data":"0e84da4937d0ab617e9fb2206f8ca18b81627bb8add0be9259def4a9b87b6093"}
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.386731 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5gbrt" event={"ID":"5e7e345d-f9d1-4c96-9da9-b960d54c7b5b","Type":"ContainerDied","Data":"c55ff26c7715ca829e214e783d855c9f4e18cb2d3c4230a2d12080d271a1b102"}
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.386785 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c55ff26c7715ca829e214e783d855c9f4e18cb2d3c4230a2d12080d271a1b102"
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.386842 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-5gbrt"
Dec 05 12:30:52 crc kubenswrapper[4711]: I1205 12:30:52.696788 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="256e8055-6879-4b2e-ae25-3603d2546b91" path="/var/lib/kubelet/pods/256e8055-6879-4b2e-ae25-3603d2546b91/volumes"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.094299 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554cdf56f-dk7v5"]
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.128094 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b748ccb4c-d9fwx"]
Dec 05 12:30:56 crc kubenswrapper[4711]: E1205 12:30:56.128723 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="256e8055-6879-4b2e-ae25-3603d2546b91" containerName="init"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.128826 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="256e8055-6879-4b2e-ae25-3603d2546b91" containerName="init"
Dec 05 12:30:56 crc kubenswrapper[4711]: E1205 12:30:56.128902 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="256e8055-6879-4b2e-ae25-3603d2546b91" containerName="dnsmasq-dns"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.128961 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="256e8055-6879-4b2e-ae25-3603d2546b91" containerName="dnsmasq-dns"
Dec 05 12:30:56 crc kubenswrapper[4711]: E1205 12:30:56.129048 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" containerName="glance-db-sync"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.129113 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" containerName="glance-db-sync"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.129473 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="256e8055-6879-4b2e-ae25-3603d2546b91" containerName="dnsmasq-dns"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.129575 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" containerName="glance-db-sync"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.130875 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.156584 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b748ccb4c-d9fwx"]
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.307727 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-sb\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.307793 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-config\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.307898 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-swift-storage-0\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.307993 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-svc\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.308019 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4wtv\" (UniqueName: \"kubernetes.io/projected/49030595-188e-44f8-9602-a8397952c540-kube-api-access-t4wtv\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.308049 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-nb\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.412780 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-svc\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.412842 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4wtv\" (UniqueName: \"kubernetes.io/projected/49030595-188e-44f8-9602-a8397952c540-kube-api-access-t4wtv\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.412882 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-nb\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.412952 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-sb\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.412981 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-config\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.413065 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-swift-storage-0\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.414159 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-swift-storage-0\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.414218 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-svc\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.418566 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-sb\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.430514 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-config\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.431258 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-nb\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.448870 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4wtv\" (UniqueName: \"kubernetes.io/projected/49030595-188e-44f8-9602-a8397952c540-kube-api-access-t4wtv\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
\"kubernetes.io/projected/49030595-188e-44f8-9602-a8397952c540-kube-api-access-t4wtv\") pod \"dnsmasq-dns-7b748ccb4c-d9fwx\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.456632 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.476185 4711 generic.go:334] "Generic (PLEG): container finished" podID="707d9a65-b0d5-426c-bc51-d9c14a954afa" containerID="3a1fca4ec087f982d3a6aa0524e79a7d4d8c5a8193935b8b6f732b75368c6a0f" exitCode=0 Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.476411 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" event={"ID":"707d9a65-b0d5-426c-bc51-d9c14a954afa","Type":"ContainerDied","Data":"3a1fca4ec087f982d3a6aa0524e79a7d4d8c5a8193935b8b6f732b75368c6a0f"} Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.501689 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5885f55d9b-n67kv" event={"ID":"d605ba82-4b40-4729-a7e7-a038bab81b2b","Type":"ContainerStarted","Data":"1fedd074343f91b8758a3fb77d9b879a17d19bbecefcb964c6bd0c15834329ea"} Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.527866 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04d0a904-080f-4fc8-b44f-29d29693847d","Type":"ContainerStarted","Data":"185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285"} Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.590934 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" event={"ID":"83f5120f-4476-4a96-b1d9-238db3564735","Type":"ContainerStarted","Data":"66c1fdf53bd0394547f060d6d800263668b11491c82b9a971492286d0e1ec651"} Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.610787 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65744cb55-qz24w" event={"ID":"483e0862-32c8-445a-a8b5-745f71c7cb3f","Type":"ContainerStarted","Data":"23d5b540ca770b7d0e0b9afb2d91d5a17305e834d12004755b8c2b96265bd6db"} Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.647174 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5467d6b846-6zlkw" podStartSLOduration=7.762366732 podStartE2EDuration="11.64715634s" podCreationTimestamp="2025-12-05 12:30:45 +0000 UTC" firstStartedPulling="2025-12-05 12:30:46.366340147 +0000 UTC m=+1291.950662477" lastFinishedPulling="2025-12-05 12:30:50.251129755 +0000 UTC m=+1295.835452085" observedRunningTime="2025-12-05 12:30:56.636755654 +0000 UTC m=+1302.221077984" watchObservedRunningTime="2025-12-05 12:30:56.64715634 +0000 UTC m=+1302.231478670" Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.672234 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-65744cb55-qz24w" podStartSLOduration=7.681646452 podStartE2EDuration="11.672213964s" podCreationTimestamp="2025-12-05 12:30:45 +0000 UTC" firstStartedPulling="2025-12-05 12:30:46.259919397 +0000 UTC m=+1291.844241727" lastFinishedPulling="2025-12-05 12:30:50.250486909 +0000 UTC m=+1295.834809239" observedRunningTime="2025-12-05 12:30:56.666985286 +0000 UTC m=+1302.251307616" watchObservedRunningTime="2025-12-05 12:30:56.672213964 +0000 UTC m=+1302.256536294" Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 
12:30:56.931061 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.932709 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.935279 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.935498 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.943326 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-c4pgg" Dec 05 12:30:56 crc kubenswrapper[4711]: I1205 12:30:56.979149 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.036917 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn79k\" (UniqueName: \"kubernetes.io/projected/fff898c3-d393-432c-9c66-7bf5a4efcb9b-kube-api-access-cn79k\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.036984 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.037007 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-logs\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.037039 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.037087 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-config-data\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.037126 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.037158 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-scripts\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.138348 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn79k\" (UniqueName: \"kubernetes.io/projected/fff898c3-d393-432c-9c66-7bf5a4efcb9b-kube-api-access-cn79k\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.138451 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.138478 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-logs\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.138523 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.138572 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-config-data\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.138612 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.138640 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-scripts\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.138980 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.139216 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.139686 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-logs\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.144341 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.145421 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-config-data\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.150549 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-547b54f9fb-c79cw" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.172:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.167115 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn79k\" (UniqueName: \"kubernetes.io/projected/fff898c3-d393-432c-9c66-7bf5a4efcb9b-kube-api-access-cn79k\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.202876 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.221660 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-scripts\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.252461 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:30:57 crc kubenswrapper[4711]: E1205 12:30:57.252920 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707d9a65-b0d5-426c-bc51-d9c14a954afa" containerName="init" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.252943 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="707d9a65-b0d5-426c-bc51-d9c14a954afa" containerName="init" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.253167 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="707d9a65-b0d5-426c-bc51-d9c14a954afa" containerName="init" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.254387 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.261571 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.266905 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.282905 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.305614 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.343139 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-svc\") pod \"707d9a65-b0d5-426c-bc51-d9c14a954afa\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.344831 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-sb\") pod \"707d9a65-b0d5-426c-bc51-d9c14a954afa\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.345014 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-nb\") pod \"707d9a65-b0d5-426c-bc51-d9c14a954afa\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.345152 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxhwd\" (UniqueName: \"kubernetes.io/projected/707d9a65-b0d5-426c-bc51-d9c14a954afa-kube-api-access-zxhwd\") pod \"707d9a65-b0d5-426c-bc51-d9c14a954afa\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.345267 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-swift-storage-0\") pod \"707d9a65-b0d5-426c-bc51-d9c14a954afa\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.345424 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-config\") pod \"707d9a65-b0d5-426c-bc51-d9c14a954afa\" (UID: \"707d9a65-b0d5-426c-bc51-d9c14a954afa\") " Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.347216 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 
12:30:57.347670 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-config-data\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.348072 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twjgg\" (UniqueName: \"kubernetes.io/projected/914619af-2eb4-4749-bd87-94898ad33754-kube-api-access-twjgg\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.348243 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-logs\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.348663 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-scripts\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.349149 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.349427 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.417667 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/707d9a65-b0d5-426c-bc51-d9c14a954afa-kube-api-access-zxhwd" (OuterVolumeSpecName: "kube-api-access-zxhwd") pod "707d9a65-b0d5-426c-bc51-d9c14a954afa" (UID: "707d9a65-b0d5-426c-bc51-d9c14a954afa"). InnerVolumeSpecName "kube-api-access-zxhwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.435973 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "707d9a65-b0d5-426c-bc51-d9c14a954afa" (UID: "707d9a65-b0d5-426c-bc51-d9c14a954afa"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.463809 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-config-data\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.464371 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "707d9a65-b0d5-426c-bc51-d9c14a954afa" (UID: "707d9a65-b0d5-426c-bc51-d9c14a954afa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.463895 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twjgg\" (UniqueName: \"kubernetes.io/projected/914619af-2eb4-4749-bd87-94898ad33754-kube-api-access-twjgg\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.466691 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-logs\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.466757 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-scripts\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.466959 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.469098 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-logs\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.472843 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.473115 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:30:57 crc 
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.474050 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.474106 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.474123 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxhwd\" (UniqueName: \"kubernetes.io/projected/707d9a65-b0d5-426c-bc51-d9c14a954afa-kube-api-access-zxhwd\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.474508 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.474942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.481177 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-config" (OuterVolumeSpecName: "config") pod "707d9a65-b0d5-426c-bc51-d9c14a954afa" (UID: "707d9a65-b0d5-426c-bc51-d9c14a954afa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.483266 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "707d9a65-b0d5-426c-bc51-d9c14a954afa" (UID: "707d9a65-b0d5-426c-bc51-d9c14a954afa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.486012 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-scripts\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.486240 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.499951 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twjgg\" (UniqueName: \"kubernetes.io/projected/914619af-2eb4-4749-bd87-94898ad33754-kube-api-access-twjgg\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.507853 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-config-data\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.537118 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "707d9a65-b0d5-426c-bc51-d9c14a954afa" (UID: "707d9a65-b0d5-426c-bc51-d9c14a954afa"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.576129 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.576173 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.576185 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/707d9a65-b0d5-426c-bc51-d9c14a954afa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.597870 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.607085 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-744fd5f788-bs9bc" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.156:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.156:8443: connect: connection refused"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.713270 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.716340 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554cdf56f-dk7v5" event={"ID":"707d9a65-b0d5-426c-bc51-d9c14a954afa","Type":"ContainerDied","Data":"569676fbc0cf2d18487a708b2b8049ceaaae26ae07b876a88dec629e87be8832"}
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.716471 4711 scope.go:117] "RemoveContainer" containerID="3a1fca4ec087f982d3a6aa0524e79a7d4d8c5a8193935b8b6f732b75368c6a0f"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.866747 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5885f55d9b-n67kv" event={"ID":"d605ba82-4b40-4729-a7e7-a038bab81b2b","Type":"ContainerStarted","Data":"260236ec8c2d397011691f495b9ea588ceb2e7cef6134721ca34f3cef9a0e1ef"}
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.867347 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5885f55d9b-n67kv"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.868505 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5885f55d9b-n67kv"
Dec 05 12:30:57 crc kubenswrapper[4711]: I1205 12:30:57.900117 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 12:30:58 crc kubenswrapper[4711]: I1205 12:30:58.543775 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5885f55d9b-n67kv" podStartSLOduration=9.543755685 podStartE2EDuration="9.543755685s" podCreationTimestamp="2025-12-05 12:30:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:57.89878691 +0000 UTC m=+1303.483109240" watchObservedRunningTime="2025-12-05 12:30:58.543755685 +0000 UTC m=+1304.128078015"
Dec 05 12:30:58 crc kubenswrapper[4711]: I1205 12:30:58.658024 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554cdf56f-dk7v5"]
Dec 05 12:30:58 crc kubenswrapper[4711]: I1205 12:30:58.674601 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554cdf56f-dk7v5"]
Dec 05 12:30:58 crc kubenswrapper[4711]: I1205 12:30:58.987486 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="707d9a65-b0d5-426c-bc51-d9c14a954afa" path="/var/lib/kubelet/pods/707d9a65-b0d5-426c-bc51-d9c14a954afa/volumes"
Dec 05 12:30:58 crc kubenswrapper[4711]: I1205 12:30:58.988890 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b748ccb4c-d9fwx"]
Dec 05 12:30:59 crc kubenswrapper[4711]: E1205 12:30:59.182187 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f496d6f_8883_493b_866b_117f6b7537e4.slice/crio-52c3df0f2f8b4be4f732bafb25b134cd6ceaaf9ed61888a915be10930b7a2042\": RecentStats: unable to find data in memory cache]"
Dec 05 12:30:59 crc kubenswrapper[4711]: I1205 12:30:59.730019 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:30:59 crc kubenswrapper[4711]: I1205 12:30:59.865692 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:30:59 crc kubenswrapper[4711]: I1205 12:30:59.961146 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d348b707-8d7c-4cbe-94d6-63a20efc6e54","Type":"ContainerStarted","Data":"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a"}
Dec 05 12:30:59 crc kubenswrapper[4711]: I1205 12:30:59.978723 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fff898c3-d393-432c-9c66-7bf5a4efcb9b","Type":"ContainerStarted","Data":"d3120fd6065940bd2ca61333276cedc7c4d3cf549e346b8bea1f17df38486f4b"}
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.010726 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04d0a904-080f-4fc8-b44f-29d29693847d","Type":"ContainerStarted","Data":"0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa"}
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.011031 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api-log" containerID="cri-o://185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285" gracePeriod=30
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.011308 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.011685 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api" containerID="cri-o://0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa" gracePeriod=30
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.029672 4711 generic.go:334] "Generic (PLEG): container finished" podID="49030595-188e-44f8-9602-a8397952c540" containerID="f40d4b3874250828a227c9e9bdcdbd2e21d9d581da2e75ca24b5fdfea94fe514" exitCode=0
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.031614 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" event={"ID":"49030595-188e-44f8-9602-a8397952c540","Type":"ContainerDied","Data":"f40d4b3874250828a227c9e9bdcdbd2e21d9d581da2e75ca24b5fdfea94fe514"}
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.031650 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" event={"ID":"49030595-188e-44f8-9602-a8397952c540","Type":"ContainerStarted","Data":"99249e91910199593d8870ded3a7c0f1117c33a7e6760bbe38bfc95b6d403ac5"}
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.051758 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.076872 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=12.076852728 podStartE2EDuration="12.076852728s" podCreationTimestamp="2025-12-05 12:30:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:00.067769716 +0000 UTC m=+1305.652092066" watchObservedRunningTime="2025-12-05 12:31:00.076852728 +0000 UTC m=+1305.661175058"
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.190925 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-547b54f9fb-c79cw" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.172:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.520975 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:31:00 crc kubenswrapper[4711]: W1205 12:31:00.562746 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod914619af_2eb4_4749_bd87_94898ad33754.slice/crio-311a1bda10698be4ab4b97272cba9b3bf86f61924a0321eab04d7a619a806fb3 WatchSource:0}: Error finding container 311a1bda10698be4ab4b97272cba9b3bf86f61924a0321eab04d7a619a806fb3: Status 404 returned error can't find the container with id 311a1bda10698be4ab4b97272cba9b3bf86f61924a0321eab04d7a619a806fb3
Dec 05 12:31:00 crc kubenswrapper[4711]: I1205 12:31:00.790475 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-547b54f9fb-c79cw"
Dec 05 12:31:01 crc kubenswrapper[4711]: I1205 12:31:01.077888 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"914619af-2eb4-4749-bd87-94898ad33754","Type":"ContainerStarted","Data":"311a1bda10698be4ab4b97272cba9b3bf86f61924a0321eab04d7a619a806fb3"}
Dec 05 12:31:01 crc kubenswrapper[4711]: I1205 12:31:01.082634 4711 generic.go:334] "Generic (PLEG): container finished" podID="04d0a904-080f-4fc8-b44f-29d29693847d" containerID="185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285" exitCode=143
Dec 05 12:31:01 crc kubenswrapper[4711]: I1205 12:31:01.082675 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04d0a904-080f-4fc8-b44f-29d29693847d","Type":"ContainerDied","Data":"185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285"}
Dec 05 12:31:01 crc kubenswrapper[4711]: I1205 12:31:01.090412 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-547b54f9fb-c79cw"
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.123027 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d348b707-8d7c-4cbe-94d6-63a20efc6e54","Type":"ContainerStarted","Data":"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735"}
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.135410 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fff898c3-d393-432c-9c66-7bf5a4efcb9b","Type":"ContainerStarted","Data":"06b967d8e3d5efaab1e5f6e913ab9b1992dbab97cb8a362be5a18674ed0befd9"}
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.149822 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"914619af-2eb4-4749-bd87-94898ad33754","Type":"ContainerStarted","Data":"e01f8ce3dac7129a26b13c67fd6bc17f11002355ccadb595a832f3403641ca59"}
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.155849 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=8.396269559 podStartE2EDuration="14.155826626s" podCreationTimestamp="2025-12-05 12:30:48 +0000 UTC" firstStartedPulling="2025-12-05 12:30:51.343017199 +0000 UTC m=+1296.927339529" lastFinishedPulling="2025-12-05 12:30:57.102574266 +0000 UTC m=+1302.686896596" observedRunningTime="2025-12-05 12:31:02.143461182 +0000 UTC m=+1307.727783522" watchObservedRunningTime="2025-12-05 12:31:02.155826626 +0000 UTC m=+1307.740148946"
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.169817 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" event={"ID":"49030595-188e-44f8-9602-a8397952c540","Type":"ContainerStarted","Data":"f59e27b825d461dcededbbe1849736de5618d1c2e13f6a2ddd822fe459259077"}
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.171490 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx"
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.201916 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" podStartSLOduration=6.201895405 podStartE2EDuration="6.201895405s" podCreationTimestamp="2025-12-05 12:30:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:02.195633412 +0000 UTC m=+1307.779955742" watchObservedRunningTime="2025-12-05 12:31:02.201895405 +0000 UTC m=+1307.786217735"
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.347228 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-545d7cb86d-bpnk9"
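The pod_startup_latency_tracker entries above encode their own arithmetic: podStartE2EDuration equals watchObservedRunningTime minus podCreationTimestamp, and when an image pull occurred, podStartSLOduration is that figure minus the pull window (lastFinishedPulling minus firstStartedPulling). A small check against the cinder-scheduler-0 entry above (Python 3; values copied from the log, illustration only, not kubelet code):

    # Seconds within the 12:30/12:31 window, copied from the
    # cinder-scheduler-0 pod_startup_latency_tracker entry above:
    pod_created        = 48.0            # podCreationTimestamp 12:30:48
    first_started_pull = 51.343017199    # firstStartedPulling
    last_finished_pull = 57.102574266    # lastFinishedPulling
    observed_running   = 62.155826626    # watchObservedRunningTime 12:31:02.155826626

    e2e = observed_running - pod_created                    # podStartE2EDuration
    slo = e2e - (last_finished_pull - first_started_pull)   # podStartSLOduration
    print(f"{e2e:.9f}s {slo:.9f}s")      # -> 14.155826626s 8.396269559s

Pods whose pull timestamps are the zero value "0001-01-01 00:00:00 +0000 UTC" (no pull observed) report identical SLO and E2E durations, which matches the dnsmasq-dns-7b748ccb4c-d9fwx entry above.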
Dec 05 12:31:02 crc kubenswrapper[4711]: I1205 12:31:02.682770 4711 scope.go:117] "RemoveContainer" containerID="7c9787731bc32175de19ee5ab017cbe5383840f61fbb915edbfb4e49a571c912"
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.183608 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerStarted","Data":"b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74"}
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.188852 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"914619af-2eb4-4749-bd87-94898ad33754","Type":"ContainerStarted","Data":"bbb5cbff41a8cb642dab7fb99ff17700f455e50e13ef8119c3bd52c429d30be9"}
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.189020 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="914619af-2eb4-4749-bd87-94898ad33754" containerName="glance-log" containerID="cri-o://e01f8ce3dac7129a26b13c67fd6bc17f11002355ccadb595a832f3403641ca59" gracePeriod=30
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.189059 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="914619af-2eb4-4749-bd87-94898ad33754" containerName="glance-httpd" containerID="cri-o://bbb5cbff41a8cb642dab7fb99ff17700f455e50e13ef8119c3bd52c429d30be9" gracePeriod=30
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.195056 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fff898c3-d393-432c-9c66-7bf5a4efcb9b","Type":"ContainerStarted","Data":"c9ce4c119edba3674f39a8de4f7cf19437ef81de22152f08bc26ffb79c3e0fa2"}
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.195421 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerName="glance-log" containerID="cri-o://06b967d8e3d5efaab1e5f6e913ab9b1992dbab97cb8a362be5a18674ed0befd9" gracePeriod=30
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.195437 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerName="glance-httpd" containerID="cri-o://c9ce4c119edba3674f39a8de4f7cf19437ef81de22152f08bc26ffb79c3e0fa2" gracePeriod=30
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.264374 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.264349568 podStartE2EDuration="7.264349568s" podCreationTimestamp="2025-12-05 12:30:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:03.253927933 +0000 UTC m=+1308.838250263" watchObservedRunningTime="2025-12-05 12:31:03.264349568 +0000 UTC m=+1308.848671898"
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.335863 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.335845451 podStartE2EDuration="8.335845451s" podCreationTimestamp="2025-12-05 12:30:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:03.32398122 +0000 UTC m=+1308.908303570" watchObservedRunningTime="2025-12-05 12:31:03.335845451 +0000 UTC m=+1308.920167781"
Dec 05 12:31:03 crc kubenswrapper[4711]: I1205 12:31:03.912104 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.226179 4711 generic.go:334] "Generic (PLEG): container finished" podID="914619af-2eb4-4749-bd87-94898ad33754" containerID="bbb5cbff41a8cb642dab7fb99ff17700f455e50e13ef8119c3bd52c429d30be9" exitCode=0
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.226212 4711 generic.go:334] "Generic (PLEG): container finished" podID="914619af-2eb4-4749-bd87-94898ad33754" containerID="e01f8ce3dac7129a26b13c67fd6bc17f11002355ccadb595a832f3403641ca59" exitCode=143
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.226251 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"914619af-2eb4-4749-bd87-94898ad33754","Type":"ContainerDied","Data":"bbb5cbff41a8cb642dab7fb99ff17700f455e50e13ef8119c3bd52c429d30be9"}
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.226279 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"914619af-2eb4-4749-bd87-94898ad33754","Type":"ContainerDied","Data":"e01f8ce3dac7129a26b13c67fd6bc17f11002355ccadb595a832f3403641ca59"}
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.235591 4711 generic.go:334] "Generic (PLEG): container finished" podID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerID="c9ce4c119edba3674f39a8de4f7cf19437ef81de22152f08bc26ffb79c3e0fa2" exitCode=0
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.235623 4711 generic.go:334] "Generic (PLEG): container finished" podID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerID="06b967d8e3d5efaab1e5f6e913ab9b1992dbab97cb8a362be5a18674ed0befd9" exitCode=143
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.236525 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fff898c3-d393-432c-9c66-7bf5a4efcb9b","Type":"ContainerDied","Data":"c9ce4c119edba3674f39a8de4f7cf19437ef81de22152f08bc26ffb79c3e0fa2"}
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.236550 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fff898c3-d393-432c-9c66-7bf5a4efcb9b","Type":"ContainerDied","Data":"06b967d8e3d5efaab1e5f6e913ab9b1992dbab97cb8a362be5a18674ed0befd9"}
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.499704 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.509157 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615106 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cn79k\" (UniqueName: \"kubernetes.io/projected/fff898c3-d393-432c-9c66-7bf5a4efcb9b-kube-api-access-cn79k\") pod \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615284 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-logs\") pod \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615317 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-config-data\") pod \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615359 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-httpd-run\") pod \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615462 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"914619af-2eb4-4749-bd87-94898ad33754\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615490 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-scripts\") pod \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615547 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-scripts\") pod \"914619af-2eb4-4749-bd87-94898ad33754\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615583 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-config-data\") pod \"914619af-2eb4-4749-bd87-94898ad33754\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615636 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twjgg\" (UniqueName: \"kubernetes.io/projected/914619af-2eb4-4749-bd87-94898ad33754-kube-api-access-twjgg\") pod \"914619af-2eb4-4749-bd87-94898ad33754\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") "
Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615677 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-httpd-run\") pod \"914619af-2eb4-4749-bd87-94898ad33754\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") "
Dec 05 12:31:04 crc kubenswrapper[4711]:
I1205 12:31:04.615706 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615734 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-combined-ca-bundle\") pod \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\" (UID: \"fff898c3-d393-432c-9c66-7bf5a4efcb9b\") " Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615759 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-logs\") pod \"914619af-2eb4-4749-bd87-94898ad33754\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.615807 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-combined-ca-bundle\") pod \"914619af-2eb4-4749-bd87-94898ad33754\" (UID: \"914619af-2eb4-4749-bd87-94898ad33754\") " Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.623057 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-logs" (OuterVolumeSpecName: "logs") pod "fff898c3-d393-432c-9c66-7bf5a4efcb9b" (UID: "fff898c3-d393-432c-9c66-7bf5a4efcb9b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.623342 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-logs" (OuterVolumeSpecName: "logs") pod "914619af-2eb4-4749-bd87-94898ad33754" (UID: "914619af-2eb4-4749-bd87-94898ad33754"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.623492 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "914619af-2eb4-4749-bd87-94898ad33754" (UID: "914619af-2eb4-4749-bd87-94898ad33754"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.627138 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fff898c3-d393-432c-9c66-7bf5a4efcb9b" (UID: "fff898c3-d393-432c-9c66-7bf5a4efcb9b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.628315 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fff898c3-d393-432c-9c66-7bf5a4efcb9b-kube-api-access-cn79k" (OuterVolumeSpecName: "kube-api-access-cn79k") pod "fff898c3-d393-432c-9c66-7bf5a4efcb9b" (UID: "fff898c3-d393-432c-9c66-7bf5a4efcb9b"). InnerVolumeSpecName "kube-api-access-cn79k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.632155 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-scripts" (OuterVolumeSpecName: "scripts") pod "914619af-2eb4-4749-bd87-94898ad33754" (UID: "914619af-2eb4-4749-bd87-94898ad33754"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.633136 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-scripts" (OuterVolumeSpecName: "scripts") pod "fff898c3-d393-432c-9c66-7bf5a4efcb9b" (UID: "fff898c3-d393-432c-9c66-7bf5a4efcb9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.634351 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "fff898c3-d393-432c-9c66-7bf5a4efcb9b" (UID: "fff898c3-d393-432c-9c66-7bf5a4efcb9b"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.644573 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/914619af-2eb4-4749-bd87-94898ad33754-kube-api-access-twjgg" (OuterVolumeSpecName: "kube-api-access-twjgg") pod "914619af-2eb4-4749-bd87-94898ad33754" (UID: "914619af-2eb4-4749-bd87-94898ad33754"). InnerVolumeSpecName "kube-api-access-twjgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.650557 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "914619af-2eb4-4749-bd87-94898ad33754" (UID: "914619af-2eb4-4749-bd87-94898ad33754"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.672598 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "914619af-2eb4-4749-bd87-94898ad33754" (UID: "914619af-2eb4-4749-bd87-94898ad33754"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.694530 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fff898c3-d393-432c-9c66-7bf5a4efcb9b" (UID: "fff898c3-d393-432c-9c66-7bf5a4efcb9b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.716682 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-config-data" (OuterVolumeSpecName: "config-data") pod "fff898c3-d393-432c-9c66-7bf5a4efcb9b" (UID: "fff898c3-d393-432c-9c66-7bf5a4efcb9b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.719550 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720132 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720151 4711 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fff898c3-d393-432c-9c66-7bf5a4efcb9b-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720177 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720186 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720194 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720203 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twjgg\" (UniqueName: \"kubernetes.io/projected/914619af-2eb4-4749-bd87-94898ad33754-kube-api-access-twjgg\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720213 4711 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720227 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720236 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fff898c3-d393-432c-9c66-7bf5a4efcb9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720246 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/914619af-2eb4-4749-bd87-94898ad33754-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720257 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.720265 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cn79k\" (UniqueName: \"kubernetes.io/projected/fff898c3-d393-432c-9c66-7bf5a4efcb9b-kube-api-access-cn79k\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.742162 4711 operation_generator.go:917] UnmountDevice 
succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.743162 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-config-data" (OuterVolumeSpecName: "config-data") pod "914619af-2eb4-4749-bd87-94898ad33754" (UID: "914619af-2eb4-4749-bd87-94898ad33754"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.747480 4711 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.822171 4711 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.822214 4711 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:04 crc kubenswrapper[4711]: I1205 12:31:04.822227 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914619af-2eb4-4749-bd87-94898ad33754-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.250239 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fff898c3-d393-432c-9c66-7bf5a4efcb9b","Type":"ContainerDied","Data":"d3120fd6065940bd2ca61333276cedc7c4d3cf549e346b8bea1f17df38486f4b"} Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.250310 4711 scope.go:117] "RemoveContainer" containerID="c9ce4c119edba3674f39a8de4f7cf19437ef81de22152f08bc26ffb79c3e0fa2" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.250459 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.252498 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"914619af-2eb4-4749-bd87-94898ad33754","Type":"ContainerDied","Data":"311a1bda10698be4ab4b97272cba9b3bf86f61924a0321eab04d7a619a806fb3"} Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.252579 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.283952 4711 scope.go:117] "RemoveContainer" containerID="06b967d8e3d5efaab1e5f6e913ab9b1992dbab97cb8a362be5a18674ed0befd9" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.305598 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.315059 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.322687 4711 scope.go:117] "RemoveContainer" containerID="bbb5cbff41a8cb642dab7fb99ff17700f455e50e13ef8119c3bd52c429d30be9" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.390515 4711 scope.go:117] "RemoveContainer" containerID="e01f8ce3dac7129a26b13c67fd6bc17f11002355ccadb595a832f3403641ca59" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.398762 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:05 crc kubenswrapper[4711]: E1205 12:31:05.399273 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="914619af-2eb4-4749-bd87-94898ad33754" containerName="glance-httpd" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.399288 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="914619af-2eb4-4749-bd87-94898ad33754" containerName="glance-httpd" Dec 05 12:31:05 crc kubenswrapper[4711]: E1205 12:31:05.399302 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerName="glance-httpd" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.399310 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerName="glance-httpd" Dec 05 12:31:05 crc kubenswrapper[4711]: E1205 12:31:05.399338 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="914619af-2eb4-4749-bd87-94898ad33754" containerName="glance-log" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.399345 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="914619af-2eb4-4749-bd87-94898ad33754" containerName="glance-log" Dec 05 12:31:05 crc kubenswrapper[4711]: E1205 12:31:05.399397 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerName="glance-log" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.399405 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerName="glance-log" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.399645 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerName="glance-httpd" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.399664 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="914619af-2eb4-4749-bd87-94898ad33754" containerName="glance-log" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.399678 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" containerName="glance-log" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.399700 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="914619af-2eb4-4749-bd87-94898ad33754" containerName="glance-httpd" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.400978 4711 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.415802 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.415871 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.416285 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-c4pgg" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.416605 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.432053 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.453924 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.474999 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.506950 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.509092 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.516789 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.517036 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.535948 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-logs\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.536001 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.536035 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gblp4\" (UniqueName: \"kubernetes.io/projected/d382a84e-a077-4aef-beac-c6be4347ebc3-kube-api-access-gblp4\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.536056 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.536101 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.536141 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.536164 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.536229 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.559690 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.637789 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-logs\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.637857 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.637893 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gblp4\" (UniqueName: \"kubernetes.io/projected/d382a84e-a077-4aef-beac-c6be4347ebc3-kube-api-access-gblp4\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.637936 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-scripts\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.637957 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638027 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638116 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638151 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638204 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638257 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8wdq\" (UniqueName: \"kubernetes.io/projected/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-kube-api-access-t8wdq\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638281 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638311 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638418 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638500 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638547 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-config-data\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638544 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-logs\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638667 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-logs\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.638688 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.639094 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.643297 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.645467 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.649194 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.651699 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-combined-ca-bundle\") pod 
\"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.658053 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gblp4\" (UniqueName: \"kubernetes.io/projected/d382a84e-a077-4aef-beac-c6be4347ebc3-kube-api-access-gblp4\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.682083 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.741464 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-scripts\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.741574 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.741625 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8wdq\" (UniqueName: \"kubernetes.io/projected/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-kube-api-access-t8wdq\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.741646 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.741682 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.741701 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.741742 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-config-data\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " 
pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.741772 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-logs\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.742306 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-logs\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.742750 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.743257 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.743697 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.753130 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-scripts\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.766760 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-config-data\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.770808 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.776089 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.784034 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8wdq\" (UniqueName: \"kubernetes.io/projected/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-kube-api-access-t8wdq\") pod 
\"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.816679 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.838972 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.998205 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 12:31:05 crc kubenswrapper[4711]: I1205 12:31:05.999875 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.018850 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-lxzj7" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.019477 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.019613 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.020072 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.049878 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf2ns\" (UniqueName: \"kubernetes.io/projected/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-kube-api-access-kf2ns\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.049941 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.050016 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-openstack-config-secret\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.050049 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-openstack-config\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.157000 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf2ns\" (UniqueName: \"kubernetes.io/projected/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-kube-api-access-kf2ns\") pod \"openstackclient\" (UID: 
\"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.157065 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.157158 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-openstack-config-secret\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.157199 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-openstack-config\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.158318 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-openstack-config\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.184036 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-openstack-config-secret\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.207750 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf2ns\" (UniqueName: \"kubernetes.io/projected/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-kube-api-access-kf2ns\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.211022 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6bcabfd-50fc-4e19-98ff-9c4f03eb5953-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953\") " pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.459500 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.464548 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.542748 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7745988997-jb8k5"] Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.543375 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7745988997-jb8k5" podUID="7684da94-d794-4cbd-accc-224b649a1c14" containerName="dnsmasq-dns" containerID="cri-o://a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d" gracePeriod=10 Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.597699 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.736472 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="914619af-2eb4-4749-bd87-94898ad33754" path="/var/lib/kubelet/pods/914619af-2eb4-4749-bd87-94898ad33754/volumes" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.744732 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fff898c3-d393-432c-9c66-7bf5a4efcb9b" path="/var/lib/kubelet/pods/fff898c3-d393-432c-9c66-7bf5a4efcb9b/volumes" Dec 05 12:31:06 crc kubenswrapper[4711]: I1205 12:31:06.857526 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:06 crc kubenswrapper[4711]: W1205 12:31:06.902057 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22e37a86_b630_47ee_ad7f_9c6cdcc2fe29.slice/crio-4eb22a93553d0a902c08f65ad2ad2d4e607420729d68b64b253587b29b89fffa WatchSource:0}: Error finding container 4eb22a93553d0a902c08f65ad2ad2d4e607420729d68b64b253587b29b89fffa: Status 404 returned error can't find the container with id 4eb22a93553d0a902c08f65ad2ad2d4e607420729d68b64b253587b29b89fffa Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.025827 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.135974 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.365583 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.409037 4711 generic.go:334] "Generic (PLEG): container finished" podID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerID="c129a0798110b72322c603984a291548de03fe231fb2d3c5c2ef0b8c8ca0577a" exitCode=137 Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.409093 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a059dfe5-97d3-412e-b70b-430bd3ab92b9","Type":"ContainerDied","Data":"c129a0798110b72322c603984a291548de03fe231fb2d3c5c2ef0b8c8ca0577a"} Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.416558 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7745988997-jb8k5" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.413826 4711 generic.go:334] "Generic (PLEG): container finished" podID="7684da94-d794-4cbd-accc-224b649a1c14" containerID="a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d" exitCode=0 Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.418094 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7745988997-jb8k5" event={"ID":"7684da94-d794-4cbd-accc-224b649a1c14","Type":"ContainerDied","Data":"a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d"} Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.418141 4711 scope.go:117] "RemoveContainer" containerID="a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.418344 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7745988997-jb8k5" event={"ID":"7684da94-d794-4cbd-accc-224b649a1c14","Type":"ContainerDied","Data":"a459c8aed21d730a927c545b2f3f19c5e923c600487b11a1c5c66af0ff9f0f8d"} Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.442223 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d382a84e-a077-4aef-beac-c6be4347ebc3","Type":"ContainerStarted","Data":"9224f080e03eb253a39da83a4e6d21fc34add1bb7c8bc88cea4cb808ebd6bdd6"} Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.444496 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953","Type":"ContainerStarted","Data":"5a64d72ccf2f52b2db9bdaf4b0e6fa09caabc6191b83465fba8107b8fec27b38"} Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.448887 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29","Type":"ContainerStarted","Data":"4eb22a93553d0a902c08f65ad2ad2d4e607420729d68b64b253587b29b89fffa"} Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.496755 4711 scope.go:117] "RemoveContainer" containerID="54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.511180 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-svc\") pod \"7684da94-d794-4cbd-accc-224b649a1c14\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.511235 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-swift-storage-0\") pod \"7684da94-d794-4cbd-accc-224b649a1c14\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.511280 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t58rw\" (UniqueName: \"kubernetes.io/projected/7684da94-d794-4cbd-accc-224b649a1c14-kube-api-access-t58rw\") pod \"7684da94-d794-4cbd-accc-224b649a1c14\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.511378 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-nb\") pod \"7684da94-d794-4cbd-accc-224b649a1c14\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.511468 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-config\") pod \"7684da94-d794-4cbd-accc-224b649a1c14\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.511549 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-sb\") pod \"7684da94-d794-4cbd-accc-224b649a1c14\" (UID: \"7684da94-d794-4cbd-accc-224b649a1c14\") " Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.547534 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7684da94-d794-4cbd-accc-224b649a1c14-kube-api-access-t58rw" (OuterVolumeSpecName: "kube-api-access-t58rw") pod "7684da94-d794-4cbd-accc-224b649a1c14" (UID: "7684da94-d794-4cbd-accc-224b649a1c14"). InnerVolumeSpecName "kube-api-access-t58rw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.606920 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-744fd5f788-bs9bc" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.156:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.156:8443: connect: connection refused" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.607042 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.617840 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t58rw\" (UniqueName: \"kubernetes.io/projected/7684da94-d794-4cbd-accc-224b649a1c14-kube-api-access-t58rw\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.658867 4711 scope.go:117] "RemoveContainer" containerID="a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d" Dec 05 12:31:07 crc kubenswrapper[4711]: E1205 12:31:07.664473 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d\": container with ID starting with a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d not found: ID does not exist" containerID="a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.664499 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d"} err="failed to get container status \"a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d\": rpc error: code = NotFound desc = could not find container \"a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d\": container with ID starting with a2d37518bc879b17d5c7786eb431d3f057230fc901474d2338d5ea6d96c9ca2d not found: ID does not exist" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.664521 4711 scope.go:117] "RemoveContainer" 
containerID="54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb" Dec 05 12:31:07 crc kubenswrapper[4711]: E1205 12:31:07.671546 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb\": container with ID starting with 54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb not found: ID does not exist" containerID="54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.671581 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb"} err="failed to get container status \"54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb\": rpc error: code = NotFound desc = could not find container \"54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb\": container with ID starting with 54ca11270ba3a61ae64ae8cc408539fa8c6bd89052bf65eeffda1180070aa8cb not found: ID does not exist" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.705806 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7684da94-d794-4cbd-accc-224b649a1c14" (UID: "7684da94-d794-4cbd-accc-224b649a1c14"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.722022 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.734050 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7684da94-d794-4cbd-accc-224b649a1c14" (UID: "7684da94-d794-4cbd-accc-224b649a1c14"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.788838 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7684da94-d794-4cbd-accc-224b649a1c14" (UID: "7684da94-d794-4cbd-accc-224b649a1c14"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.824516 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.824757 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.832257 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7684da94-d794-4cbd-accc-224b649a1c14" (UID: "7684da94-d794-4cbd-accc-224b649a1c14"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.855860 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-config" (OuterVolumeSpecName: "config") pod "7684da94-d794-4cbd-accc-224b649a1c14" (UID: "7684da94-d794-4cbd-accc-224b649a1c14"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.926891 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:07 crc kubenswrapper[4711]: I1205 12:31:07.926921 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7684da94-d794-4cbd-accc-224b649a1c14-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.016059 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.131793 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-run-httpd\") pod \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.132184 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-log-httpd\") pod \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.132304 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-scripts\") pod \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.132431 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-sg-core-conf-yaml\") pod \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.132487 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-config-data\") pod \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.132545 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb8lx\" (UniqueName: \"kubernetes.io/projected/a059dfe5-97d3-412e-b70b-430bd3ab92b9-kube-api-access-hb8lx\") pod \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.132567 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-combined-ca-bundle\") pod \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\" (UID: \"a059dfe5-97d3-412e-b70b-430bd3ab92b9\") " Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.132679 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a059dfe5-97d3-412e-b70b-430bd3ab92b9" (UID: "a059dfe5-97d3-412e-b70b-430bd3ab92b9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.132792 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a059dfe5-97d3-412e-b70b-430bd3ab92b9" (UID: "a059dfe5-97d3-412e-b70b-430bd3ab92b9"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.133105 4711 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.133131 4711 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a059dfe5-97d3-412e-b70b-430bd3ab92b9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.142301 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-scripts" (OuterVolumeSpecName: "scripts") pod "a059dfe5-97d3-412e-b70b-430bd3ab92b9" (UID: "a059dfe5-97d3-412e-b70b-430bd3ab92b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.152730 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a059dfe5-97d3-412e-b70b-430bd3ab92b9-kube-api-access-hb8lx" (OuterVolumeSpecName: "kube-api-access-hb8lx") pod "a059dfe5-97d3-412e-b70b-430bd3ab92b9" (UID: "a059dfe5-97d3-412e-b70b-430bd3ab92b9"). InnerVolumeSpecName "kube-api-access-hb8lx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.188057 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7745988997-jb8k5"] Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.192902 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a059dfe5-97d3-412e-b70b-430bd3ab92b9" (UID: "a059dfe5-97d3-412e-b70b-430bd3ab92b9"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.225493 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7745988997-jb8k5"] Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.233791 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5885f55d9b-n67kv" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.235678 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.236008 4711 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.236402 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb8lx\" (UniqueName: \"kubernetes.io/projected/a059dfe5-97d3-412e-b70b-430bd3ab92b9-kube-api-access-hb8lx\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.333190 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-547b54f9fb-c79cw"] Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.333480 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-547b54f9fb-c79cw" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api-log" containerID="cri-o://68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6" gracePeriod=30 Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.334045 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-547b54f9fb-c79cw" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api" containerID="cri-o://43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972" gracePeriod=30 Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.371337 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a059dfe5-97d3-412e-b70b-430bd3ab92b9" (UID: "a059dfe5-97d3-412e-b70b-430bd3ab92b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.410922 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-config-data" (OuterVolumeSpecName: "config-data") pod "a059dfe5-97d3-412e-b70b-430bd3ab92b9" (UID: "a059dfe5-97d3-412e-b70b-430bd3ab92b9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.449878 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.449920 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a059dfe5-97d3-412e-b70b-430bd3ab92b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.489146 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d382a84e-a077-4aef-beac-c6be4347ebc3","Type":"ContainerStarted","Data":"09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888"} Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.496581 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29","Type":"ContainerStarted","Data":"e83ece53a0980a332f4d448350613ae812f0ea913ff4f5bf0022fc050c72dd64"} Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.498266 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a059dfe5-97d3-412e-b70b-430bd3ab92b9","Type":"ContainerDied","Data":"a1d0ce754db2dd80247b96db2dc906d145b888ed7726b02523cef4b3ef318f27"} Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.498292 4711 scope.go:117] "RemoveContainer" containerID="c129a0798110b72322c603984a291548de03fe231fb2d3c5c2ef0b8c8ca0577a" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.498466 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.591972 4711 scope.go:117] "RemoveContainer" containerID="9235ed2ac35f39d072b75d4a15a0434260525cdfdb0838e2d3c5a87670f1dce9" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.617754 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.636465 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.661836 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:08 crc kubenswrapper[4711]: E1205 12:31:08.662284 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="proxy-httpd" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662298 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="proxy-httpd" Dec 05 12:31:08 crc kubenswrapper[4711]: E1205 12:31:08.662314 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="ceilometer-notification-agent" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662319 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="ceilometer-notification-agent" Dec 05 12:31:08 crc kubenswrapper[4711]: E1205 12:31:08.662352 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7684da94-d794-4cbd-accc-224b649a1c14" containerName="init" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662360 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="7684da94-d794-4cbd-accc-224b649a1c14" containerName="init" Dec 05 12:31:08 crc kubenswrapper[4711]: E1205 12:31:08.662368 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7684da94-d794-4cbd-accc-224b649a1c14" containerName="dnsmasq-dns" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662376 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="7684da94-d794-4cbd-accc-224b649a1c14" containerName="dnsmasq-dns" Dec 05 12:31:08 crc kubenswrapper[4711]: E1205 12:31:08.662505 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="sg-core" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662517 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="sg-core" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662754 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="sg-core" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662775 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="proxy-httpd" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662789 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" containerName="ceilometer-notification-agent" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.662815 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="7684da94-d794-4cbd-accc-224b649a1c14" containerName="dnsmasq-dns" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.665228 4711 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.668600 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.669159 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.706291 4711 scope.go:117] "RemoveContainer" containerID="454c5c2c8669dfd6e10ce7d7bb01e3db1fc9bd864e5c7e05ed5b825fbe3bed20" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.765774 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7684da94-d794-4cbd-accc-224b649a1c14" path="/var/lib/kubelet/pods/7684da94-d794-4cbd-accc-224b649a1c14/volumes" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.766586 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a059dfe5-97d3-412e-b70b-430bd3ab92b9" path="/var/lib/kubelet/pods/a059dfe5-97d3-412e-b70b-430bd3ab92b9/volumes" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.767176 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.767584 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.767613 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-config-data\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.767687 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-run-httpd\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.767718 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-scripts\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.767805 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-log-httpd\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.767856 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.767870 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn9cv\" (UniqueName: \"kubernetes.io/projected/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-kube-api-access-qn9cv\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.869495 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.869750 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-config-data\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.869802 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-run-httpd\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.869825 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-scripts\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.869892 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-log-httpd\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.869930 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.869947 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn9cv\" (UniqueName: \"kubernetes.io/projected/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-kube-api-access-qn9cv\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.882667 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-log-httpd\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.884333 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-run-httpd\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.892372 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.913214 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn9cv\" (UniqueName: \"kubernetes.io/projected/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-kube-api-access-qn9cv\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.914209 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-config-data\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.925058 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:08 crc kubenswrapper[4711]: I1205 12:31:08.928939 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-scripts\") pod \"ceilometer-0\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") " pod="openstack/ceilometer-0" Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.009865 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.254748 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.175:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.416625 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.500279 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.531198 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerDied","Data":"b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74"} Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.531626 4711 scope.go:117] "RemoveContainer" containerID="7c9787731bc32175de19ee5ab017cbe5383840f61fbb915edbfb4e49a571c912" Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.537746 4711 generic.go:334] "Generic (PLEG): container finished" podID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerID="b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74" exitCode=1 Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.592059 4711 scope.go:117] "RemoveContainer" containerID="b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74" Dec 05 12:31:09 crc kubenswrapper[4711]: E1205 12:31:09.593516 4711 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.615907 4711 generic.go:334] "Generic (PLEG): container finished" podID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerID="68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6" exitCode=143 Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.616161 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerName="cinder-scheduler" containerID="cri-o://0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a" gracePeriod=30 Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.616500 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547b54f9fb-c79cw" event={"ID":"19a491f3-df0d-4517-8299-408d5c3cb6be","Type":"ContainerDied","Data":"68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6"} Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.616518 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerName="probe" containerID="cri-o://b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735" gracePeriod=30 Dec 05 12:31:09 crc kubenswrapper[4711]: I1205 12:31:09.820110 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:09 crc kubenswrapper[4711]: E1205 12:31:09.867173 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f496d6f_8883_493b_866b_117f6b7537e4.slice/crio-52c3df0f2f8b4be4f732bafb25b134cd6ceaaf9ed61888a915be10930b7a2042\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fe97f8e_7a0e_40ef_8cab_3530224b79ee.slice/crio-b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fe97f8e_7a0e_40ef_8cab_3530224b79ee.slice/crio-conmon-b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74.scope\": RecentStats: unable to find data in memory cache]" Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.631182 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerStarted","Data":"02536a0546d6c26e5e5fafcc2f4688c7e4997ded4c09910ce80b50f496c5752e"} Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.631427 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerStarted","Data":"d667f6786117206f467e41e9af94c2737c49965764a2c1b99fe8df72e6c6fc08"} Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.640762 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d382a84e-a077-4aef-beac-c6be4347ebc3","Type":"ContainerStarted","Data":"e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89"} 
Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.642841 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29","Type":"ContainerStarted","Data":"dc69e40bb76139c0eee1cd93626080cba7d3ef950c8213c9c1bfb230457117d0"} Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.678123 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.678102818 podStartE2EDuration="5.678102818s" podCreationTimestamp="2025-12-05 12:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:10.662966467 +0000 UTC m=+1316.247288797" watchObservedRunningTime="2025-12-05 12:31:10.678102818 +0000 UTC m=+1316.262425148" Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.851974 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.852279 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.852737 4711 scope.go:117] "RemoveContainer" containerID="b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74" Dec 05 12:31:10 crc kubenswrapper[4711]: E1205 12:31:10.852984 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:31:10 crc kubenswrapper[4711]: I1205 12:31:10.871183 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.871163322 podStartE2EDuration="5.871163322s" podCreationTimestamp="2025-12-05 12:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:10.705276814 +0000 UTC m=+1316.289599174" watchObservedRunningTime="2025-12-05 12:31:10.871163322 +0000 UTC m=+1316.455485652" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.309703 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.472555 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data\") pod \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.472601 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-scripts\") pod \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.472647 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d348b707-8d7c-4cbe-94d6-63a20efc6e54-etc-machine-id\") pod \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.472666 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data-custom\") pod \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.472721 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-combined-ca-bundle\") pod \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.472753 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qf2h6\" (UniqueName: \"kubernetes.io/projected/d348b707-8d7c-4cbe-94d6-63a20efc6e54-kube-api-access-qf2h6\") pod \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\" (UID: \"d348b707-8d7c-4cbe-94d6-63a20efc6e54\") " Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.474289 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d348b707-8d7c-4cbe-94d6-63a20efc6e54-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d348b707-8d7c-4cbe-94d6-63a20efc6e54" (UID: "d348b707-8d7c-4cbe-94d6-63a20efc6e54"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.481502 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d348b707-8d7c-4cbe-94d6-63a20efc6e54" (UID: "d348b707-8d7c-4cbe-94d6-63a20efc6e54"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.494536 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-scripts" (OuterVolumeSpecName: "scripts") pod "d348b707-8d7c-4cbe-94d6-63a20efc6e54" (UID: "d348b707-8d7c-4cbe-94d6-63a20efc6e54"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.524571 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d348b707-8d7c-4cbe-94d6-63a20efc6e54-kube-api-access-qf2h6" (OuterVolumeSpecName: "kube-api-access-qf2h6") pod "d348b707-8d7c-4cbe-94d6-63a20efc6e54" (UID: "d348b707-8d7c-4cbe-94d6-63a20efc6e54"). InnerVolumeSpecName "kube-api-access-qf2h6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.575868 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.575899 4711 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d348b707-8d7c-4cbe-94d6-63a20efc6e54-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.575910 4711 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.575919 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qf2h6\" (UniqueName: \"kubernetes.io/projected/d348b707-8d7c-4cbe-94d6-63a20efc6e54-kube-api-access-qf2h6\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.584190 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d348b707-8d7c-4cbe-94d6-63a20efc6e54" (UID: "d348b707-8d7c-4cbe-94d6-63a20efc6e54"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.642769 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-547b54f9fb-c79cw" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.172:9311/healthcheck\": read tcp 10.217.0.2:52702->10.217.0.172:9311: read: connection reset by peer" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.642833 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-547b54f9fb-c79cw" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.172:9311/healthcheck\": read tcp 10.217.0.2:52718->10.217.0.172:9311: read: connection reset by peer" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.670990 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerStarted","Data":"5e6b54581d2d2252b28d769f750de0c65ea81a17b3f9bdef88a6ef2582766fa1"} Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.677228 4711 generic.go:334] "Generic (PLEG): container finished" podID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerID="b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735" exitCode=0 Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.677258 4711 generic.go:334] "Generic (PLEG): container finished" podID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerID="0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a" exitCode=0 Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.677667 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.677861 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.677972 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d348b707-8d7c-4cbe-94d6-63a20efc6e54","Type":"ContainerDied","Data":"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735"} Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.677998 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d348b707-8d7c-4cbe-94d6-63a20efc6e54","Type":"ContainerDied","Data":"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a"} Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.678009 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d348b707-8d7c-4cbe-94d6-63a20efc6e54","Type":"ContainerDied","Data":"02e9cb8a17c35e7a262d6a9f9477dcb7ddfe25d62b1e7b7b2a712499b6a42241"} Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.678025 4711 scope.go:117] "RemoveContainer" containerID="b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.685547 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data" (OuterVolumeSpecName: "config-data") pod "d348b707-8d7c-4cbe-94d6-63a20efc6e54" (UID: "d348b707-8d7c-4cbe-94d6-63a20efc6e54"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.735724 4711 scope.go:117] "RemoveContainer" containerID="0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.778781 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d348b707-8d7c-4cbe-94d6-63a20efc6e54-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.828379 4711 scope.go:117] "RemoveContainer" containerID="b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735" Dec 05 12:31:11 crc kubenswrapper[4711]: E1205 12:31:11.828869 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735\": container with ID starting with b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735 not found: ID does not exist" containerID="b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.828906 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735"} err="failed to get container status \"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735\": rpc error: code = NotFound desc = could not find container \"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735\": container with ID starting with b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735 not found: ID does not exist" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.828932 4711 scope.go:117] "RemoveContainer" containerID="0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a" Dec 05 12:31:11 crc kubenswrapper[4711]: E1205 12:31:11.831052 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a\": container with ID starting with 0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a not found: ID does not exist" containerID="0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.831094 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a"} err="failed to get container status \"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a\": rpc error: code = NotFound desc = could not find container \"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a\": container with ID starting with 0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a not found: ID does not exist" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.831113 4711 scope.go:117] "RemoveContainer" containerID="b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.831328 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735"} err="failed to get container status \"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735\": rpc error: code = NotFound desc = 
could not find container \"b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735\": container with ID starting with b2d4d4d21a860172c32ff6859c790dc3c452a93fa828a86c3647e438cb371735 not found: ID does not exist" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.831342 4711 scope.go:117] "RemoveContainer" containerID="0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a" Dec 05 12:31:11 crc kubenswrapper[4711]: I1205 12:31:11.831630 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a"} err="failed to get container status \"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a\": rpc error: code = NotFound desc = could not find container \"0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a\": container with ID starting with 0d58f8747757d2541e2538759db9aca2c7a39e124c89496509de36e5f363f39a not found: ID does not exist" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.052244 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.058892 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7d94746d9d-m4k6w" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.088179 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.107584 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.135800 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:31:12 crc kubenswrapper[4711]: E1205 12:31:12.136640 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerName="probe" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.136663 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerName="probe" Dec 05 12:31:12 crc kubenswrapper[4711]: E1205 12:31:12.136695 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerName="cinder-scheduler" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.136704 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerName="cinder-scheduler" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.136971 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerName="probe" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.136995 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" containerName="cinder-scheduler" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.138467 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.143544 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.157475 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.287856 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-scripts\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.287907 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-config-data\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.287932 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62zh4\" (UniqueName: \"kubernetes.io/projected/ed1a3b33-3fb6-412a-8bde-03171358617c-kube-api-access-62zh4\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.287960 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed1a3b33-3fb6-412a-8bde-03171358617c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.287982 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.288076 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.352429 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.390100 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-scripts\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.390146 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-config-data\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.390175 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62zh4\" (UniqueName: \"kubernetes.io/projected/ed1a3b33-3fb6-412a-8bde-03171358617c-kube-api-access-62zh4\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.390216 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed1a3b33-3fb6-412a-8bde-03171358617c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.390243 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.390333 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.391049 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed1a3b33-3fb6-412a-8bde-03171358617c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.397450 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.397666 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-config-data\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.397961 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-scripts\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.407010 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed1a3b33-3fb6-412a-8bde-03171358617c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.408069 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62zh4\" (UniqueName: \"kubernetes.io/projected/ed1a3b33-3fb6-412a-8bde-03171358617c-kube-api-access-62zh4\") pod \"cinder-scheduler-0\" (UID: \"ed1a3b33-3fb6-412a-8bde-03171358617c\") " pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.481189 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.491726 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19a491f3-df0d-4517-8299-408d5c3cb6be-logs\") pod \"19a491f3-df0d-4517-8299-408d5c3cb6be\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.491835 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzbn2\" (UniqueName: \"kubernetes.io/projected/19a491f3-df0d-4517-8299-408d5c3cb6be-kube-api-access-bzbn2\") pod \"19a491f3-df0d-4517-8299-408d5c3cb6be\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.491891 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data\") pod \"19a491f3-df0d-4517-8299-408d5c3cb6be\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.492032 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-combined-ca-bundle\") pod \"19a491f3-df0d-4517-8299-408d5c3cb6be\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.492104 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data-custom\") pod \"19a491f3-df0d-4517-8299-408d5c3cb6be\" (UID: \"19a491f3-df0d-4517-8299-408d5c3cb6be\") " Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.492274 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19a491f3-df0d-4517-8299-408d5c3cb6be-logs" (OuterVolumeSpecName: "logs") pod "19a491f3-df0d-4517-8299-408d5c3cb6be" (UID: "19a491f3-df0d-4517-8299-408d5c3cb6be"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.492728 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19a491f3-df0d-4517-8299-408d5c3cb6be-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.513686 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "19a491f3-df0d-4517-8299-408d5c3cb6be" (UID: "19a491f3-df0d-4517-8299-408d5c3cb6be"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.535845 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19a491f3-df0d-4517-8299-408d5c3cb6be-kube-api-access-bzbn2" (OuterVolumeSpecName: "kube-api-access-bzbn2") pod "19a491f3-df0d-4517-8299-408d5c3cb6be" (UID: "19a491f3-df0d-4517-8299-408d5c3cb6be"). InnerVolumeSpecName "kube-api-access-bzbn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.563636 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data" (OuterVolumeSpecName: "config-data") pod "19a491f3-df0d-4517-8299-408d5c3cb6be" (UID: "19a491f3-df0d-4517-8299-408d5c3cb6be"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.596581 4711 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.596620 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzbn2\" (UniqueName: \"kubernetes.io/projected/19a491f3-df0d-4517-8299-408d5c3cb6be-kube-api-access-bzbn2\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.596631 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.599475 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19a491f3-df0d-4517-8299-408d5c3cb6be" (UID: "19a491f3-df0d-4517-8299-408d5c3cb6be"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.724870 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19a491f3-df0d-4517-8299-408d5c3cb6be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.738644 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d348b707-8d7c-4cbe-94d6-63a20efc6e54" path="/var/lib/kubelet/pods/d348b707-8d7c-4cbe-94d6-63a20efc6e54/volumes" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.741883 4711 generic.go:334] "Generic (PLEG): container finished" podID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerID="43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972" exitCode=0 Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.742295 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-547b54f9fb-c79cw" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.746439 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547b54f9fb-c79cw" event={"ID":"19a491f3-df0d-4517-8299-408d5c3cb6be","Type":"ContainerDied","Data":"43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972"} Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.746497 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547b54f9fb-c79cw" event={"ID":"19a491f3-df0d-4517-8299-408d5c3cb6be","Type":"ContainerDied","Data":"fea785e110d9f80ad284d4e6079e8208ae542f3900c917da6502d7838d757eac"} Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.746527 4711 scope.go:117] "RemoveContainer" containerID="43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.764772 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerStarted","Data":"f5aa3bc1c98d98ca2f48b817861933f7c7da08dc166c43d5e28511f15f46203d"} Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.817210 4711 scope.go:117] "RemoveContainer" containerID="68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.820279 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.835990 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-547b54f9fb-c79cw"] Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.851141 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-547b54f9fb-c79cw"] Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.915517 4711 scope.go:117] "RemoveContainer" containerID="43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972" Dec 05 12:31:12 crc kubenswrapper[4711]: E1205 12:31:12.916160 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972\": container with ID starting with 43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972 not found: ID does not exist" containerID="43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.916201 4711 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972"} err="failed to get container status \"43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972\": rpc error: code = NotFound desc = could not find container \"43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972\": container with ID starting with 43818a4a904462f49669f9a44c81641f0a54df59382cedfd32bd14b56e892972 not found: ID does not exist" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.916228 4711 scope.go:117] "RemoveContainer" containerID="68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6" Dec 05 12:31:12 crc kubenswrapper[4711]: E1205 12:31:12.916607 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6\": container with ID starting with 68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6 not found: ID does not exist" containerID="68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.916641 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6"} err="failed to get container status \"68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6\": rpc error: code = NotFound desc = could not find container \"68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6\": container with ID starting with 68f17ea60c21435c47620e4924a23bf07cb942397775f672a62e5cfcf2b850f6 not found: ID does not exist" Dec 05 12:31:12 crc kubenswrapper[4711]: I1205 12:31:12.987479 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:31:13 crc kubenswrapper[4711]: W1205 12:31:12.993558 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded1a3b33_3fb6_412a_8bde_03171358617c.slice/crio-1047d3af6b3220b71e94d5c11b9ccd7dea50dbc21062696a8603f5da4a58bf23 WatchSource:0}: Error finding container 1047d3af6b3220b71e94d5c11b9ccd7dea50dbc21062696a8603f5da4a58bf23: Status 404 returned error can't find the container with id 1047d3af6b3220b71e94d5c11b9ccd7dea50dbc21062696a8603f5da4a58bf23 Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.809205 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed1a3b33-3fb6-412a-8bde-03171358617c","Type":"ContainerStarted","Data":"1047d3af6b3220b71e94d5c11b9ccd7dea50dbc21062696a8603f5da4a58bf23"} Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.821526 4711 generic.go:334] "Generic (PLEG): container finished" podID="53844e89-65d7-4cbc-b375-dbfef360857b" containerID="a8b2276036c2d8555cc86ef88f650eefe07004dceb19cb735a3726cc375238af" exitCode=137 Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.821580 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-744fd5f788-bs9bc" event={"ID":"53844e89-65d7-4cbc-b375-dbfef360857b","Type":"ContainerDied","Data":"a8b2276036c2d8555cc86ef88f650eefe07004dceb19cb735a3726cc375238af"} Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.920934 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.956529 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-tls-certs\") pod \"53844e89-65d7-4cbc-b375-dbfef360857b\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.956843 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-secret-key\") pod \"53844e89-65d7-4cbc-b375-dbfef360857b\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.956972 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-combined-ca-bundle\") pod \"53844e89-65d7-4cbc-b375-dbfef360857b\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.957062 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vhc6\" (UniqueName: \"kubernetes.io/projected/53844e89-65d7-4cbc-b375-dbfef360857b-kube-api-access-7vhc6\") pod \"53844e89-65d7-4cbc-b375-dbfef360857b\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.957168 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53844e89-65d7-4cbc-b375-dbfef360857b-logs\") pod \"53844e89-65d7-4cbc-b375-dbfef360857b\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.957265 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-config-data\") pod \"53844e89-65d7-4cbc-b375-dbfef360857b\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.957340 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-scripts\") pod \"53844e89-65d7-4cbc-b375-dbfef360857b\" (UID: \"53844e89-65d7-4cbc-b375-dbfef360857b\") " Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.960167 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53844e89-65d7-4cbc-b375-dbfef360857b-logs" (OuterVolumeSpecName: "logs") pod "53844e89-65d7-4cbc-b375-dbfef360857b" (UID: "53844e89-65d7-4cbc-b375-dbfef360857b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.963823 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53844e89-65d7-4cbc-b375-dbfef360857b-kube-api-access-7vhc6" (OuterVolumeSpecName: "kube-api-access-7vhc6") pod "53844e89-65d7-4cbc-b375-dbfef360857b" (UID: "53844e89-65d7-4cbc-b375-dbfef360857b"). InnerVolumeSpecName "kube-api-access-7vhc6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:13 crc kubenswrapper[4711]: I1205 12:31:13.966624 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "53844e89-65d7-4cbc-b375-dbfef360857b" (UID: "53844e89-65d7-4cbc-b375-dbfef360857b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.000825 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-scripts" (OuterVolumeSpecName: "scripts") pod "53844e89-65d7-4cbc-b375-dbfef360857b" (UID: "53844e89-65d7-4cbc-b375-dbfef360857b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.004567 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53844e89-65d7-4cbc-b375-dbfef360857b" (UID: "53844e89-65d7-4cbc-b375-dbfef360857b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.034009 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-config-data" (OuterVolumeSpecName: "config-data") pod "53844e89-65d7-4cbc-b375-dbfef360857b" (UID: "53844e89-65d7-4cbc-b375-dbfef360857b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.036911 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "53844e89-65d7-4cbc-b375-dbfef360857b" (UID: "53844e89-65d7-4cbc-b375-dbfef360857b"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.059143 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.059176 4711 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.059188 4711 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.059199 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53844e89-65d7-4cbc-b375-dbfef360857b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.059210 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vhc6\" (UniqueName: \"kubernetes.io/projected/53844e89-65d7-4cbc-b375-dbfef360857b-kube-api-access-7vhc6\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.059221 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53844e89-65d7-4cbc-b375-dbfef360857b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.059231 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53844e89-65d7-4cbc-b375-dbfef360857b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.699002 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" path="/var/lib/kubelet/pods/19a491f3-df0d-4517-8299-408d5c3cb6be/volumes" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.844630 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed1a3b33-3fb6-412a-8bde-03171358617c","Type":"ContainerStarted","Data":"1deb51448de40c85e33fa510d3b69aa71210558fba3e0a5c1b96759c6a5f0c34"} Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.848618 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-744fd5f788-bs9bc" event={"ID":"53844e89-65d7-4cbc-b375-dbfef360857b","Type":"ContainerDied","Data":"1a19ff5754cad182a10b1c0214a8f9acedb0a7693526158772580b30bf8d9223"} Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.848649 4711 scope.go:117] "RemoveContainer" containerID="d8992c9aa5235fde93e9e62efbeb7b298a387f545314aa2553dcff5475ec8e93" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.848740 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-744fd5f788-bs9bc" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.858693 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerStarted","Data":"0ecd3d9bfeea93cb82b3284864193842a505c0c8d0313f5bc1352f4e33cbf4b2"} Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.859492 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.877685 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-744fd5f788-bs9bc"] Dec 05 12:31:14 crc kubenswrapper[4711]: I1205 12:31:14.885640 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-744fd5f788-bs9bc"] Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.052661 4711 scope.go:117] "RemoveContainer" containerID="a8b2276036c2d8555cc86ef88f650eefe07004dceb19cb735a3726cc375238af" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.744626 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.744887 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.839530 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.839891 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.839951 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.869596 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.088723388 podStartE2EDuration="7.869580748s" podCreationTimestamp="2025-12-05 12:31:08 +0000 UTC" firstStartedPulling="2025-12-05 12:31:09.858172983 +0000 UTC m=+1315.442495323" lastFinishedPulling="2025-12-05 12:31:13.639030353 +0000 UTC m=+1319.223352683" observedRunningTime="2025-12-05 12:31:14.897148472 +0000 UTC m=+1320.481470812" watchObservedRunningTime="2025-12-05 12:31:15.869580748 +0000 UTC m=+1321.453903078" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.879197 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed1a3b33-3fb6-412a-8bde-03171358617c","Type":"ContainerStarted","Data":"8894ec824b466445a7567019b38f2b0655e78cc1f6fc6daebb92f85018183957"} Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.883792 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.893605 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.896449 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.896740 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-external-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.901535 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 12:31:15 crc kubenswrapper[4711]: I1205 12:31:15.920662 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.920642069 podStartE2EDuration="3.920642069s" podCreationTimestamp="2025-12-05 12:31:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:15.901741046 +0000 UTC m=+1321.486063396" watchObservedRunningTime="2025-12-05 12:31:15.920642069 +0000 UTC m=+1321.504964399" Dec 05 12:31:16 crc kubenswrapper[4711]: I1205 12:31:16.701169 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" path="/var/lib/kubelet/pods/53844e89-65d7-4cbc-b375-dbfef360857b/volumes" Dec 05 12:31:16 crc kubenswrapper[4711]: I1205 12:31:16.891038 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 12:31:16 crc kubenswrapper[4711]: I1205 12:31:16.891077 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.482227 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.495516 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5d757fb76c-sptb4"] Dec 05 12:31:17 crc kubenswrapper[4711]: E1205 12:31:17.496017 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.496040 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon" Dec 05 12:31:17 crc kubenswrapper[4711]: E1205 12:31:17.496061 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon-log" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.496071 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon-log" Dec 05 12:31:17 crc kubenswrapper[4711]: E1205 12:31:17.496081 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api-log" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.496089 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api-log" Dec 05 12:31:17 crc kubenswrapper[4711]: E1205 12:31:17.496117 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.496124 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.496443 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.496474 
4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="19a491f3-df0d-4517-8299-408d5c3cb6be" containerName="barbican-api-log" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.496491 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.496504 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="53844e89-65d7-4cbc-b375-dbfef360857b" containerName="horizon-log" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.497804 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.500378 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.500459 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.500463 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.525292 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5d757fb76c-sptb4"] Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.539914 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-public-tls-certs\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.539967 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-internal-tls-certs\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.539994 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c319c3a5-f67a-47d7-bfe3-8e874cf01471-run-httpd\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.540032 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c319c3a5-f67a-47d7-bfe3-8e874cf01471-log-httpd\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.540063 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-config-data\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.540115 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-combined-ca-bundle\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.540136 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c319c3a5-f67a-47d7-bfe3-8e874cf01471-etc-swift\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.540188 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcbgb\" (UniqueName: \"kubernetes.io/projected/c319c3a5-f67a-47d7-bfe3-8e874cf01471-kube-api-access-kcbgb\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642025 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-config-data\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642105 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-combined-ca-bundle\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642125 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c319c3a5-f67a-47d7-bfe3-8e874cf01471-etc-swift\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642175 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcbgb\" (UniqueName: \"kubernetes.io/projected/c319c3a5-f67a-47d7-bfe3-8e874cf01471-kube-api-access-kcbgb\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642215 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-public-tls-certs\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642244 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-internal-tls-certs\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642271 4711 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c319c3a5-f67a-47d7-bfe3-8e874cf01471-run-httpd\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642303 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c319c3a5-f67a-47d7-bfe3-8e874cf01471-log-httpd\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.642823 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c319c3a5-f67a-47d7-bfe3-8e874cf01471-log-httpd\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.646771 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c319c3a5-f67a-47d7-bfe3-8e874cf01471-run-httpd\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.655189 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-public-tls-certs\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.655446 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-config-data\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.661115 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcbgb\" (UniqueName: \"kubernetes.io/projected/c319c3a5-f67a-47d7-bfe3-8e874cf01471-kube-api-access-kcbgb\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.662259 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-internal-tls-certs\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.663286 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c319c3a5-f67a-47d7-bfe3-8e874cf01471-etc-swift\") pod \"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.663695 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c319c3a5-f67a-47d7-bfe3-8e874cf01471-combined-ca-bundle\") pod 
\"swift-proxy-5d757fb76c-sptb4\" (UID: \"c319c3a5-f67a-47d7-bfe3-8e874cf01471\") " pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.818332 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5d757fb76c-sptb4" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.906973 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:31:17 crc kubenswrapper[4711]: I1205 12:31:17.906998 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:31:18 crc kubenswrapper[4711]: I1205 12:31:18.301331 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:31:18 crc kubenswrapper[4711]: I1205 12:31:18.301406 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:31:18 crc kubenswrapper[4711]: I1205 12:31:18.922662 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:31:18 crc kubenswrapper[4711]: I1205 12:31:18.923006 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.186298 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.186862 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="ceilometer-central-agent" containerID="cri-o://02536a0546d6c26e5e5fafcc2f4688c7e4997ded4c09910ce80b50f496c5752e" gracePeriod=30 Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.187334 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="proxy-httpd" containerID="cri-o://0ecd3d9bfeea93cb82b3284864193842a505c0c8d0313f5bc1352f4e33cbf4b2" gracePeriod=30 Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.187561 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="ceilometer-notification-agent" containerID="cri-o://5e6b54581d2d2252b28d769f750de0c65ea81a17b3f9bdef88a6ef2582766fa1" gracePeriod=30 Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.187617 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="sg-core" containerID="cri-o://f5aa3bc1c98d98ca2f48b817861933f7c7da08dc166c43d5e28511f15f46203d" gracePeriod=30 Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.656828 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.656940 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 
12:31:20.716004 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.716694 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.746740 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.761285 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.851693 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.852469 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.852536 4711 scope.go:117] "RemoveContainer" containerID="b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74" Dec 05 12:31:20 crc kubenswrapper[4711]: E1205 12:31:20.853065 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.952690 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerID="0ecd3d9bfeea93cb82b3284864193842a505c0c8d0313f5bc1352f4e33cbf4b2" exitCode=0 Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.952733 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerID="f5aa3bc1c98d98ca2f48b817861933f7c7da08dc166c43d5e28511f15f46203d" exitCode=2 Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.952771 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerID="5e6b54581d2d2252b28d769f750de0c65ea81a17b3f9bdef88a6ef2582766fa1" exitCode=0 Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.952784 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerID="02536a0546d6c26e5e5fafcc2f4688c7e4997ded4c09910ce80b50f496c5752e" exitCode=0 Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.952775 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerDied","Data":"0ecd3d9bfeea93cb82b3284864193842a505c0c8d0313f5bc1352f4e33cbf4b2"} Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.952869 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerDied","Data":"f5aa3bc1c98d98ca2f48b817861933f7c7da08dc166c43d5e28511f15f46203d"} Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.952880 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerDied","Data":"5e6b54581d2d2252b28d769f750de0c65ea81a17b3f9bdef88a6ef2582766fa1"} Dec 05 12:31:20 crc 
kubenswrapper[4711]: I1205 12:31:20.952892 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerDied","Data":"02536a0546d6c26e5e5fafcc2f4688c7e4997ded4c09910ce80b50f496c5752e"} Dec 05 12:31:20 crc kubenswrapper[4711]: I1205 12:31:20.954292 4711 scope.go:117] "RemoveContainer" containerID="b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74" Dec 05 12:31:20 crc kubenswrapper[4711]: E1205 12:31:20.954511 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:31:22 crc kubenswrapper[4711]: I1205 12:31:22.692848 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 12:31:26 crc kubenswrapper[4711]: I1205 12:31:26.976271 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.019061 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5","Type":"ContainerDied","Data":"d667f6786117206f467e41e9af94c2737c49965764a2c1b99fe8df72e6c6fc08"} Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.019119 4711 scope.go:117] "RemoveContainer" containerID="0ecd3d9bfeea93cb82b3284864193842a505c0c8d0313f5bc1352f4e33cbf4b2" Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.019257 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.039361 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-scripts\") pod \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") "
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.039474 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-run-httpd\") pod \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") "
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.039511 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn9cv\" (UniqueName: \"kubernetes.io/projected/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-kube-api-access-qn9cv\") pod \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") "
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.039608 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-sg-core-conf-yaml\") pod \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") "
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.039705 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-log-httpd\") pod \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") "
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.039761 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-combined-ca-bundle\") pod \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") "
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.039784 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-config-data\") pod \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\" (UID: \"5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5\") "
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.041579 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" (UID: "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.046674 4711 scope.go:117] "RemoveContainer" containerID="f5aa3bc1c98d98ca2f48b817861933f7c7da08dc166c43d5e28511f15f46203d"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.048809 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-scripts" (OuterVolumeSpecName: "scripts") pod "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" (UID: "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.049019 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" (UID: "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.050441 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-kube-api-access-qn9cv" (OuterVolumeSpecName: "kube-api-access-qn9cv") pod "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" (UID: "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5"). InnerVolumeSpecName "kube-api-access-qn9cv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.080635 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" (UID: "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.086157 4711 scope.go:117] "RemoveContainer" containerID="5e6b54581d2d2252b28d769f750de0c65ea81a17b3f9bdef88a6ef2582766fa1"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.117859 4711 scope.go:117] "RemoveContainer" containerID="02536a0546d6c26e5e5fafcc2f4688c7e4997ded4c09910ce80b50f496c5752e"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.145915 4711 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.145956 4711 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.145970 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.145980 4711 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.145992 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn9cv\" (UniqueName: \"kubernetes.io/projected/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-kube-api-access-qn9cv\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.171336 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-config-data" (OuterVolumeSpecName: "config-data") pod "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" (UID: "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.173498 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" (UID: "5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.216461 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5d757fb76c-sptb4"]
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.247162 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.247483 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.359196 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.379344 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.389310 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:31:27 crc kubenswrapper[4711]: E1205 12:31:27.389882 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="ceilometer-notification-agent"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.389901 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="ceilometer-notification-agent"
Dec 05 12:31:27 crc kubenswrapper[4711]: E1205 12:31:27.389921 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="ceilometer-central-agent"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.389929 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="ceilometer-central-agent"
Dec 05 12:31:27 crc kubenswrapper[4711]: E1205 12:31:27.389942 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="sg-core"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.389951 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="sg-core"
Dec 05 12:31:27 crc kubenswrapper[4711]: E1205 12:31:27.389989 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="proxy-httpd"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.389997 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="proxy-httpd"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.390267 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="ceilometer-notification-agent"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.390284 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="ceilometer-central-agent"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.390302 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="proxy-httpd"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.390328 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" containerName="sg-core"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.392904 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.397039 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.397090 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.420351 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.451915 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-log-httpd\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.452091 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxf9z\" (UniqueName: \"kubernetes.io/projected/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-kube-api-access-gxf9z\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.452283 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-run-httpd\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.452330 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-scripts\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.452485 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-config-data\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.452525 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.452546 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.553955 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-log-httpd\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.554003 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxf9z\" (UniqueName: \"kubernetes.io/projected/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-kube-api-access-gxf9z\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.554047 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-run-httpd\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.554088 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-scripts\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.554142 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-config-data\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.554158 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.554176 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.555044 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-run-httpd\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.555259 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-log-httpd\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.558143 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.558514 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.558792 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-scripts\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.562513 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-config-data\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.573288 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxf9z\" (UniqueName: \"kubernetes.io/projected/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-kube-api-access-gxf9z\") pod \"ceilometer-0\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " pod="openstack/ceilometer-0"
Dec 05 12:31:27 crc kubenswrapper[4711]: I1205 12:31:27.720825 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.034075 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5d757fb76c-sptb4" event={"ID":"c319c3a5-f67a-47d7-bfe3-8e874cf01471","Type":"ContainerStarted","Data":"c02088e6bbd95998c5ecb6a8dd39d2ae4aa4c21c7e93f631594406ff8dba7111"}
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.034131 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5d757fb76c-sptb4" event={"ID":"c319c3a5-f67a-47d7-bfe3-8e874cf01471","Type":"ContainerStarted","Data":"567ccdb85c7df6723f654a75d9aeac1f902654bf4b213deb383f7486eddf8fde"}
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.034143 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5d757fb76c-sptb4" event={"ID":"c319c3a5-f67a-47d7-bfe3-8e874cf01471","Type":"ContainerStarted","Data":"bfebe2a6df05a94fc807505e449bab14aee890f5a31c6ee1c06e71900f33394a"}
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.034232 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5d757fb76c-sptb4"
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.034428 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5d757fb76c-sptb4"
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.037784 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b6bcabfd-50fc-4e19-98ff-9c4f03eb5953","Type":"ContainerStarted","Data":"05fc33a75d3dbb6f163b5e2c40a182c5b3d69b0cd6174d23c195a2b3e842c21e"}
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.068088 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5d757fb76c-sptb4" podStartSLOduration=11.068067103 podStartE2EDuration="11.068067103s" podCreationTimestamp="2025-12-05 12:31:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:28.059225357 +0000 UTC m=+1333.643547697" watchObservedRunningTime="2025-12-05 12:31:28.068067103 +0000 UTC m=+1333.652389433"
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.090622 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.555575793 podStartE2EDuration="23.090600716s" podCreationTimestamp="2025-12-05 12:31:05 +0000 UTC" firstStartedPulling="2025-12-05 12:31:07.136911666 +0000 UTC m=+1312.721234006" lastFinishedPulling="2025-12-05 12:31:26.671936599 +0000 UTC m=+1332.256258929" observedRunningTime="2025-12-05 12:31:28.076114261 +0000 UTC m=+1333.660436601" watchObservedRunningTime="2025-12-05 12:31:28.090600716 +0000 UTC m=+1333.674923046"
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.212578 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:31:28 crc kubenswrapper[4711]: I1205 12:31:28.698006 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5" path="/var/lib/kubelet/pods/5f92f2d3-2c6c-467e-bdd5-e72de2e57ca5/volumes"
Dec 05 12:31:29 crc kubenswrapper[4711]: I1205 12:31:29.050711 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerStarted","Data":"af0babb079f05fd7ab1387d387b0a13ff896f1c1d4e1ce9de97bd36516b42883"}
Dec 05 12:31:29 crc kubenswrapper[4711]: I1205 12:31:29.050776 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerStarted","Data":"32358d9b6e35a1287e4be73ef9652a0d17ca001d1394c71178e6a3161bb344f8"}
Dec 05 12:31:29 crc kubenswrapper[4711]: I1205 12:31:29.050795 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerStarted","Data":"e5ac6a721a7975d958915142a332b5d6d02d1d738bc297f5831888d7bf263ea6"}
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.063970 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerStarted","Data":"66a4fb5c924f4b2787216fe82b68bfd3a76a79cdf81efb1eb1e13c1caae73da2"}
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.522226 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.619345 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data-custom\") pod \"04d0a904-080f-4fc8-b44f-29d29693847d\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") "
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.619442 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r88l\" (UniqueName: \"kubernetes.io/projected/04d0a904-080f-4fc8-b44f-29d29693847d-kube-api-access-6r88l\") pod \"04d0a904-080f-4fc8-b44f-29d29693847d\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") "
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.619467 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-scripts\") pod \"04d0a904-080f-4fc8-b44f-29d29693847d\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") "
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.619515 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data\") pod \"04d0a904-080f-4fc8-b44f-29d29693847d\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") "
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.619568 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-combined-ca-bundle\") pod \"04d0a904-080f-4fc8-b44f-29d29693847d\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") "
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.619639 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04d0a904-080f-4fc8-b44f-29d29693847d-logs\") pod \"04d0a904-080f-4fc8-b44f-29d29693847d\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") "
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.619709 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04d0a904-080f-4fc8-b44f-29d29693847d-etc-machine-id\") pod \"04d0a904-080f-4fc8-b44f-29d29693847d\" (UID: \"04d0a904-080f-4fc8-b44f-29d29693847d\") "
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.620232 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/04d0a904-080f-4fc8-b44f-29d29693847d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "04d0a904-080f-4fc8-b44f-29d29693847d" (UID: "04d0a904-080f-4fc8-b44f-29d29693847d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.621970 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04d0a904-080f-4fc8-b44f-29d29693847d-logs" (OuterVolumeSpecName: "logs") pod "04d0a904-080f-4fc8-b44f-29d29693847d" (UID: "04d0a904-080f-4fc8-b44f-29d29693847d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.627593 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "04d0a904-080f-4fc8-b44f-29d29693847d" (UID: "04d0a904-080f-4fc8-b44f-29d29693847d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.631517 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04d0a904-080f-4fc8-b44f-29d29693847d-kube-api-access-6r88l" (OuterVolumeSpecName: "kube-api-access-6r88l") pod "04d0a904-080f-4fc8-b44f-29d29693847d" (UID: "04d0a904-080f-4fc8-b44f-29d29693847d"). InnerVolumeSpecName "kube-api-access-6r88l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.632812 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-scripts" (OuterVolumeSpecName: "scripts") pod "04d0a904-080f-4fc8-b44f-29d29693847d" (UID: "04d0a904-080f-4fc8-b44f-29d29693847d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.684194 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04d0a904-080f-4fc8-b44f-29d29693847d" (UID: "04d0a904-080f-4fc8-b44f-29d29693847d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.722284 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.722324 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04d0a904-080f-4fc8-b44f-29d29693847d-logs\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.722342 4711 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04d0a904-080f-4fc8-b44f-29d29693847d-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.722353 4711 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.722366 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r88l\" (UniqueName: \"kubernetes.io/projected/04d0a904-080f-4fc8-b44f-29d29693847d-kube-api-access-6r88l\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.722380 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.723801 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data" (OuterVolumeSpecName: "config-data") pod "04d0a904-080f-4fc8-b44f-29d29693847d" (UID: "04d0a904-080f-4fc8-b44f-29d29693847d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:31:30 crc kubenswrapper[4711]: I1205 12:31:30.824156 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d0a904-080f-4fc8-b44f-29d29693847d-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.074981 4711 generic.go:334] "Generic (PLEG): container finished" podID="04d0a904-080f-4fc8-b44f-29d29693847d" containerID="0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa" exitCode=137
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.075047 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04d0a904-080f-4fc8-b44f-29d29693847d","Type":"ContainerDied","Data":"0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa"}
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.075258 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04d0a904-080f-4fc8-b44f-29d29693847d","Type":"ContainerDied","Data":"0e84da4937d0ab617e9fb2206f8ca18b81627bb8add0be9259def4a9b87b6093"}
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.075279 4711 scope.go:117] "RemoveContainer" containerID="0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.075062 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.079019 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerStarted","Data":"29828b1f4594ec5c4aa1c903a603acc93c7098d1b20e90e06f235d45935b6c53"}
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.079238 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.103524 4711 scope.go:117] "RemoveContainer" containerID="185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.111880 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.52266681 podStartE2EDuration="4.11186151s" podCreationTimestamp="2025-12-05 12:31:27 +0000 UTC" firstStartedPulling="2025-12-05 12:31:28.216993645 +0000 UTC m=+1333.801315975" lastFinishedPulling="2025-12-05 12:31:30.806188345 +0000 UTC m=+1336.390510675" observedRunningTime="2025-12-05 12:31:31.105089823 +0000 UTC m=+1336.689412173" watchObservedRunningTime="2025-12-05 12:31:31.11186151 +0000 UTC m=+1336.696183830"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.133436 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.146050 4711 scope.go:117] "RemoveContainer" containerID="0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa"
Dec 05 12:31:31 crc kubenswrapper[4711]: E1205 12:31:31.146620 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa\": container with ID starting with 0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa not found: ID does not exist" containerID="0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.146678 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa"} err="failed to get container status \"0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa\": rpc error: code = NotFound desc = could not find container \"0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa\": container with ID starting with 0d275200628ea0c3dfc10b2edff9bd15e317a2109f08265b74ee52b6ca3dc0aa not found: ID does not exist"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.146709 4711 scope.go:117] "RemoveContainer" containerID="185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285"
Dec 05 12:31:31 crc kubenswrapper[4711]: E1205 12:31:31.150541 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285\": container with ID starting with 185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285 not found: ID does not exist" containerID="185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.150744 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285"} err="failed to get container status \"185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285\": rpc error: code = NotFound desc = could not find container \"185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285\": container with ID starting with 185de4a95a69c14f9c04024258d67a7902bf30a58179b799da3eeabaec9e5285 not found: ID does not exist"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.150923 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.161415 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 05 12:31:31 crc kubenswrapper[4711]: E1205 12:31:31.161955 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api-log"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.161981 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api-log"
Dec 05 12:31:31 crc kubenswrapper[4711]: E1205 12:31:31.161995 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.162004 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.162268 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.162309 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" containerName="cinder-api-log"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.163887 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.167155 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.167626 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.170697 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.184200 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.231949 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.232043 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.232074 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89jjr\" (UniqueName: \"kubernetes.io/projected/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-kube-api-access-89jjr\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.232132 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-config-data\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.232170 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-logs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.232236 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-scripts\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.232341 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-config-data-custom\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.232433 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.232465 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334198 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334245 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334335 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334381 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334449 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89jjr\" (UniqueName: \"kubernetes.io/projected/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-kube-api-access-89jjr\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334470 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-config-data\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334503 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-logs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334525 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-scripts\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.334555 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-config-data-custom\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.335037 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.335269 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-logs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.338885 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.339294 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-config-data-custom\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.339668 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.340598 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-scripts\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.341201 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.341524 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-config-data\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.352508 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89jjr\" (UniqueName: \"kubernetes.io/projected/0bfa64cd-4859-46f6-9261-8c7f4e63ccfd-kube-api-access-89jjr\") pod \"cinder-api-0\" (UID: \"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd\") " pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.495551 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 12:31:31 crc kubenswrapper[4711]: I1205 12:31:31.994192 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 12:31:31 crc kubenswrapper[4711]: W1205 12:31:31.997552 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bfa64cd_4859_46f6_9261_8c7f4e63ccfd.slice/crio-3d2ac55d9740d910740775e54e428049c42be9a65b272518ceba73a8622a2094 WatchSource:0}: Error finding container 3d2ac55d9740d910740775e54e428049c42be9a65b272518ceba73a8622a2094: Status 404 returned error can't find the container with id 3d2ac55d9740d910740775e54e428049c42be9a65b272518ceba73a8622a2094
Dec 05 12:31:32 crc kubenswrapper[4711]: I1205 12:31:32.108649 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd","Type":"ContainerStarted","Data":"3d2ac55d9740d910740775e54e428049c42be9a65b272518ceba73a8622a2094"}
Dec 05 12:31:32 crc kubenswrapper[4711]: I1205 12:31:32.695458 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04d0a904-080f-4fc8-b44f-29d29693847d" path="/var/lib/kubelet/pods/04d0a904-080f-4fc8-b44f-29d29693847d/volumes"
Dec 05 12:31:32 crc kubenswrapper[4711]: I1205 12:31:32.835309 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5d757fb76c-sptb4"
Dec 05 12:31:32 crc kubenswrapper[4711]: I1205 12:31:32.836858 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5d757fb76c-sptb4"
Dec 05 12:31:33 crc kubenswrapper[4711]: I1205 12:31:33.131371 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd","Type":"ContainerStarted","Data":"dbafeca15d6ba6d97c72db840a5e8f8e695138e92e60be052f6c3270fe42a9b5"}
Dec 05 12:31:33 crc kubenswrapper[4711]: I1205 12:31:33.322588 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:31:33 crc kubenswrapper[4711]: I1205 12:31:33.322836 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="ceilometer-central-agent" containerID="cri-o://32358d9b6e35a1287e4be73ef9652a0d17ca001d1394c71178e6a3161bb344f8" gracePeriod=30
Dec 05 12:31:33 crc kubenswrapper[4711]: I1205 12:31:33.322979 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="proxy-httpd" containerID="cri-o://29828b1f4594ec5c4aa1c903a603acc93c7098d1b20e90e06f235d45935b6c53" gracePeriod=30
Dec 05 12:31:33 crc kubenswrapper[4711]: I1205 12:31:33.323012 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="sg-core" containerID="cri-o://66a4fb5c924f4b2787216fe82b68bfd3a76a79cdf81efb1eb1e13c1caae73da2" gracePeriod=30
Dec 05 12:31:33 crc kubenswrapper[4711]: I1205 12:31:33.323159 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="ceilometer-notification-agent" containerID="cri-o://af0babb079f05fd7ab1387d387b0a13ff896f1c1d4e1ce9de97bd36516b42883" gracePeriod=30
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.144158 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0bfa64cd-4859-46f6-9261-8c7f4e63ccfd","Type":"ContainerStarted","Data":"3240be672a1c8f00d232fc396d4ac202bc93a9c97bfcdd1c40be6ea2668e77e6"}
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.144543 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.152031 4711 generic.go:334] "Generic (PLEG): container finished" podID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerID="29828b1f4594ec5c4aa1c903a603acc93c7098d1b20e90e06f235d45935b6c53" exitCode=0
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.152059 4711 generic.go:334] "Generic (PLEG): container finished" podID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerID="66a4fb5c924f4b2787216fe82b68bfd3a76a79cdf81efb1eb1e13c1caae73da2" exitCode=2
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.152066 4711 generic.go:334] "Generic (PLEG): container finished" podID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerID="af0babb079f05fd7ab1387d387b0a13ff896f1c1d4e1ce9de97bd36516b42883" exitCode=0
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.152085 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerDied","Data":"29828b1f4594ec5c4aa1c903a603acc93c7098d1b20e90e06f235d45935b6c53"}
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.152110 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerDied","Data":"66a4fb5c924f4b2787216fe82b68bfd3a76a79cdf81efb1eb1e13c1caae73da2"}
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.152121 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerDied","Data":"af0babb079f05fd7ab1387d387b0a13ff896f1c1d4e1ce9de97bd36516b42883"}
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.168204 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.168188213 podStartE2EDuration="3.168188213s" podCreationTimestamp="2025-12-05 12:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:34.166606514 +0000 UTC m=+1339.750928854" watchObservedRunningTime="2025-12-05 12:31:34.168188213 +0000 UTC m=+1339.752510543"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.234661 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-kmnxp"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.235870 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-kmnxp"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.245736 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-kmnxp"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.293024 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htqb9\" (UniqueName: \"kubernetes.io/projected/250ba9fe-e717-439d-9616-857bc376a77e-kube-api-access-htqb9\") pod \"nova-api-db-create-kmnxp\" (UID: \"250ba9fe-e717-439d-9616-857bc376a77e\") " pod="openstack/nova-api-db-create-kmnxp"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.293157 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/250ba9fe-e717-439d-9616-857bc376a77e-operator-scripts\") pod \"nova-api-db-create-kmnxp\" (UID: \"250ba9fe-e717-439d-9616-857bc376a77e\") " pod="openstack/nova-api-db-create-kmnxp"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.340869 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-lmvv6"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.342497 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-lmvv6"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.357196 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-c164-account-create-update-pd8cn"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.358774 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c164-account-create-update-pd8cn"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.361318 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.368435 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c164-account-create-update-pd8cn"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.384456 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-lmvv6"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.395416 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnj4t\" (UniqueName: \"kubernetes.io/projected/7aa9010c-513f-407e-add2-37b2f0399865-kube-api-access-nnj4t\") pod \"nova-cell0-db-create-lmvv6\" (UID: \"7aa9010c-513f-407e-add2-37b2f0399865\") " pod="openstack/nova-cell0-db-create-lmvv6"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.395505 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6kd6\" (UniqueName: \"kubernetes.io/projected/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-kube-api-access-z6kd6\") pod \"nova-api-c164-account-create-update-pd8cn\" (UID: \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\") " pod="openstack/nova-api-c164-account-create-update-pd8cn"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.395545 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htqb9\" (UniqueName: \"kubernetes.io/projected/250ba9fe-e717-439d-9616-857bc376a77e-kube-api-access-htqb9\") pod \"nova-api-db-create-kmnxp\" (UID: \"250ba9fe-e717-439d-9616-857bc376a77e\") " pod="openstack/nova-api-db-create-kmnxp"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.395585 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/250ba9fe-e717-439d-9616-857bc376a77e-operator-scripts\") pod \"nova-api-db-create-kmnxp\" (UID: \"250ba9fe-e717-439d-9616-857bc376a77e\") " pod="openstack/nova-api-db-create-kmnxp"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.395620 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-operator-scripts\") pod \"nova-api-c164-account-create-update-pd8cn\" (UID: \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\") " pod="openstack/nova-api-c164-account-create-update-pd8cn"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.395666 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7aa9010c-513f-407e-add2-37b2f0399865-operator-scripts\") pod \"nova-cell0-db-create-lmvv6\" (UID: \"7aa9010c-513f-407e-add2-37b2f0399865\") " pod="openstack/nova-cell0-db-create-lmvv6"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.396705 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/250ba9fe-e717-439d-9616-857bc376a77e-operator-scripts\") pod \"nova-api-db-create-kmnxp\" (UID: \"250ba9fe-e717-439d-9616-857bc376a77e\") " pod="openstack/nova-api-db-create-kmnxp"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.436280 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htqb9\" (UniqueName: \"kubernetes.io/projected/250ba9fe-e717-439d-9616-857bc376a77e-kube-api-access-htqb9\") pod \"nova-api-db-create-kmnxp\" (UID: \"250ba9fe-e717-439d-9616-857bc376a77e\") " pod="openstack/nova-api-db-create-kmnxp"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.496809 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7aa9010c-513f-407e-add2-37b2f0399865-operator-scripts\") pod \"nova-cell0-db-create-lmvv6\" (UID: \"7aa9010c-513f-407e-add2-37b2f0399865\") " pod="openstack/nova-cell0-db-create-lmvv6"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.496868 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnj4t\" (UniqueName: \"kubernetes.io/projected/7aa9010c-513f-407e-add2-37b2f0399865-kube-api-access-nnj4t\") pod \"nova-cell0-db-create-lmvv6\" (UID: \"7aa9010c-513f-407e-add2-37b2f0399865\") " pod="openstack/nova-cell0-db-create-lmvv6"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.496925 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6kd6\" (UniqueName: \"kubernetes.io/projected/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-kube-api-access-z6kd6\") pod \"nova-api-c164-account-create-update-pd8cn\" (UID: \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\") " pod="openstack/nova-api-c164-account-create-update-pd8cn"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.496996 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-operator-scripts\") pod \"nova-api-c164-account-create-update-pd8cn\" (UID: \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\") " pod="openstack/nova-api-c164-account-create-update-pd8cn"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.497745 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-operator-scripts\") pod \"nova-api-c164-account-create-update-pd8cn\" (UID: \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\") " pod="openstack/nova-api-c164-account-create-update-pd8cn"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.498214 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7aa9010c-513f-407e-add2-37b2f0399865-operator-scripts\") pod \"nova-cell0-db-create-lmvv6\" (UID: \"7aa9010c-513f-407e-add2-37b2f0399865\") " pod="openstack/nova-cell0-db-create-lmvv6"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.517294 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6kd6\" (UniqueName: \"kubernetes.io/projected/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-kube-api-access-z6kd6\") pod \"nova-api-c164-account-create-update-pd8cn\" (UID: \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\") " pod="openstack/nova-api-c164-account-create-update-pd8cn"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.534119 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnj4t\" (UniqueName: \"kubernetes.io/projected/7aa9010c-513f-407e-add2-37b2f0399865-kube-api-access-nnj4t\") pod \"nova-cell0-db-create-lmvv6\" (UID: \"7aa9010c-513f-407e-add2-37b2f0399865\") " pod="openstack/nova-cell0-db-create-lmvv6"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.538204 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-9rrzg"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.539433 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9rrzg"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.558799 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-kmnxp"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.572885 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-3e3b-account-create-update-tv8bt"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.578141 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.580809 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.611558 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn6gb\" (UniqueName: \"kubernetes.io/projected/f68d680a-2f6f-4227-a356-1005fa0084b6-kube-api-access-qn6gb\") pod \"nova-cell0-3e3b-account-create-update-tv8bt\" (UID: \"f68d680a-2f6f-4227-a356-1005fa0084b6\") " pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.616363 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68d680a-2f6f-4227-a356-1005fa0084b6-operator-scripts\") pod \"nova-cell0-3e3b-account-create-update-tv8bt\" (UID: \"f68d680a-2f6f-4227-a356-1005fa0084b6\") " pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.639494 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3e3b-account-create-update-tv8bt"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.645425 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-9rrzg"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.665854 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-lmvv6"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.683446 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c164-account-create-update-pd8cn"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.720248 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68d680a-2f6f-4227-a356-1005fa0084b6-operator-scripts\") pod \"nova-cell0-3e3b-account-create-update-tv8bt\" (UID: \"f68d680a-2f6f-4227-a356-1005fa0084b6\") " pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.720290 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee061325-4a4d-400a-91fb-9b7d2bf3b383-operator-scripts\") pod \"nova-cell1-db-create-9rrzg\" (UID: \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\") " pod="openstack/nova-cell1-db-create-9rrzg"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.720344 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mltxw\" (UniqueName: \"kubernetes.io/projected/ee061325-4a4d-400a-91fb-9b7d2bf3b383-kube-api-access-mltxw\") pod \"nova-cell1-db-create-9rrzg\" (UID: \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\") " pod="openstack/nova-cell1-db-create-9rrzg"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.721158 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn6gb\" (UniqueName: \"kubernetes.io/projected/f68d680a-2f6f-4227-a356-1005fa0084b6-kube-api-access-qn6gb\") pod \"nova-cell0-3e3b-account-create-update-tv8bt\" (UID: \"f68d680a-2f6f-4227-a356-1005fa0084b6\") " pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.721471 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68d680a-2f6f-4227-a356-1005fa0084b6-operator-scripts\") pod \"nova-cell0-3e3b-account-create-update-tv8bt\" (UID: \"f68d680a-2f6f-4227-a356-1005fa0084b6\") " pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.762435 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn6gb\" (UniqueName: \"kubernetes.io/projected/f68d680a-2f6f-4227-a356-1005fa0084b6-kube-api-access-qn6gb\") pod \"nova-cell0-3e3b-account-create-update-tv8bt\" (UID: \"f68d680a-2f6f-4227-a356-1005fa0084b6\") " pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt"
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.775981 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-e1ed-account-create-update-tc6t7"]
Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.777580 4711 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.785665 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.801559 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-e1ed-account-create-update-tc6t7"] Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.822413 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee061325-4a4d-400a-91fb-9b7d2bf3b383-operator-scripts\") pod \"nova-cell1-db-create-9rrzg\" (UID: \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\") " pod="openstack/nova-cell1-db-create-9rrzg" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.822490 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mltxw\" (UniqueName: \"kubernetes.io/projected/ee061325-4a4d-400a-91fb-9b7d2bf3b383-kube-api-access-mltxw\") pod \"nova-cell1-db-create-9rrzg\" (UID: \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\") " pod="openstack/nova-cell1-db-create-9rrzg" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.822593 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ad4bc45-298a-4ac6-877f-3022b22780cb-operator-scripts\") pod \"nova-cell1-e1ed-account-create-update-tc6t7\" (UID: \"6ad4bc45-298a-4ac6-877f-3022b22780cb\") " pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.822649 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb46j\" (UniqueName: \"kubernetes.io/projected/6ad4bc45-298a-4ac6-877f-3022b22780cb-kube-api-access-tb46j\") pod \"nova-cell1-e1ed-account-create-update-tc6t7\" (UID: \"6ad4bc45-298a-4ac6-877f-3022b22780cb\") " pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.823171 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee061325-4a4d-400a-91fb-9b7d2bf3b383-operator-scripts\") pod \"nova-cell1-db-create-9rrzg\" (UID: \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\") " pod="openstack/nova-cell1-db-create-9rrzg" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.848282 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mltxw\" (UniqueName: \"kubernetes.io/projected/ee061325-4a4d-400a-91fb-9b7d2bf3b383-kube-api-access-mltxw\") pod \"nova-cell1-db-create-9rrzg\" (UID: \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\") " pod="openstack/nova-cell1-db-create-9rrzg" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.924450 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ad4bc45-298a-4ac6-877f-3022b22780cb-operator-scripts\") pod \"nova-cell1-e1ed-account-create-update-tc6t7\" (UID: \"6ad4bc45-298a-4ac6-877f-3022b22780cb\") " pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.924582 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb46j\" (UniqueName: 
\"kubernetes.io/projected/6ad4bc45-298a-4ac6-877f-3022b22780cb-kube-api-access-tb46j\") pod \"nova-cell1-e1ed-account-create-update-tc6t7\" (UID: \"6ad4bc45-298a-4ac6-877f-3022b22780cb\") " pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.925319 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ad4bc45-298a-4ac6-877f-3022b22780cb-operator-scripts\") pod \"nova-cell1-e1ed-account-create-update-tc6t7\" (UID: \"6ad4bc45-298a-4ac6-877f-3022b22780cb\") " pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:34 crc kubenswrapper[4711]: I1205 12:31:34.982136 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb46j\" (UniqueName: \"kubernetes.io/projected/6ad4bc45-298a-4ac6-877f-3022b22780cb-kube-api-access-tb46j\") pod \"nova-cell1-e1ed-account-create-update-tc6t7\" (UID: \"6ad4bc45-298a-4ac6-877f-3022b22780cb\") " pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.036624 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9rrzg" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.052907 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.064008 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.143844 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-kmnxp"] Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.245367 4711 generic.go:334] "Generic (PLEG): container finished" podID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerID="32358d9b6e35a1287e4be73ef9652a0d17ca001d1394c71178e6a3161bb344f8" exitCode=0 Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.246444 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerDied","Data":"32358d9b6e35a1287e4be73ef9652a0d17ca001d1394c71178e6a3161bb344f8"} Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.335971 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-lmvv6"] Dec 05 12:31:35 crc kubenswrapper[4711]: W1205 12:31:35.349109 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7aa9010c_513f_407e_add2_37b2f0399865.slice/crio-4337e74d3392babfd1176b02861830831f5f0ba7dd4db82efd1e2f7f0d988d33 WatchSource:0}: Error finding container 4337e74d3392babfd1176b02861830831f5f0ba7dd4db82efd1e2f7f0d988d33: Status 404 returned error can't find the container with id 4337e74d3392babfd1176b02861830831f5f0ba7dd4db82efd1e2f7f0d988d33 Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.405691 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.436219 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-scripts\") pod \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.436303 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxf9z\" (UniqueName: \"kubernetes.io/projected/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-kube-api-access-gxf9z\") pod \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.436334 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-sg-core-conf-yaml\") pod \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.436407 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-log-httpd\") pod \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.436459 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-combined-ca-bundle\") pod \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.436488 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-config-data\") pod \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.436577 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-run-httpd\") pod \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\" (UID: \"dca6b4a9-bc25-4f86-be16-00ee98c04aeb\") " Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.437725 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dca6b4a9-bc25-4f86-be16-00ee98c04aeb" (UID: "dca6b4a9-bc25-4f86-be16-00ee98c04aeb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.437782 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dca6b4a9-bc25-4f86-be16-00ee98c04aeb" (UID: "dca6b4a9-bc25-4f86-be16-00ee98c04aeb"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.443069 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-kube-api-access-gxf9z" (OuterVolumeSpecName: "kube-api-access-gxf9z") pod "dca6b4a9-bc25-4f86-be16-00ee98c04aeb" (UID: "dca6b4a9-bc25-4f86-be16-00ee98c04aeb"). InnerVolumeSpecName "kube-api-access-gxf9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.445842 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-scripts" (OuterVolumeSpecName: "scripts") pod "dca6b4a9-bc25-4f86-be16-00ee98c04aeb" (UID: "dca6b4a9-bc25-4f86-be16-00ee98c04aeb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.494771 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dca6b4a9-bc25-4f86-be16-00ee98c04aeb" (UID: "dca6b4a9-bc25-4f86-be16-00ee98c04aeb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.536641 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c164-account-create-update-pd8cn"] Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.541198 4711 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.541221 4711 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.541230 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.541240 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxf9z\" (UniqueName: \"kubernetes.io/projected/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-kube-api-access-gxf9z\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.541250 4711 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.590905 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dca6b4a9-bc25-4f86-be16-00ee98c04aeb" (UID: "dca6b4a9-bc25-4f86-be16-00ee98c04aeb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.642690 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.650577 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-config-data" (OuterVolumeSpecName: "config-data") pod "dca6b4a9-bc25-4f86-be16-00ee98c04aeb" (UID: "dca6b4a9-bc25-4f86-be16-00ee98c04aeb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.684010 4711 scope.go:117] "RemoveContainer" containerID="b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.736015 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-9rrzg"] Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.744178 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dca6b4a9-bc25-4f86-be16-00ee98c04aeb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.754148 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3e3b-account-create-update-tv8bt"] Dec 05 12:31:35 crc kubenswrapper[4711]: W1205 12:31:35.767996 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf68d680a_2f6f_4227_a356_1005fa0084b6.slice/crio-6420d93f1b9d62db3170820a406edd6177e56b56d97649ea689c6fb62c2dba28 WatchSource:0}: Error finding container 6420d93f1b9d62db3170820a406edd6177e56b56d97649ea689c6fb62c2dba28: Status 404 returned error can't find the container with id 6420d93f1b9d62db3170820a406edd6177e56b56d97649ea689c6fb62c2dba28 Dec 05 12:31:35 crc kubenswrapper[4711]: I1205 12:31:35.861043 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-e1ed-account-create-update-tc6t7"] Dec 05 12:31:35 crc kubenswrapper[4711]: W1205 12:31:35.875448 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ad4bc45_298a_4ac6_877f_3022b22780cb.slice/crio-da2f1ac8fbf685e82029dcef5cbd9b57ce25322d2d2d9cd79f92fe170d26a9c7 WatchSource:0}: Error finding container da2f1ac8fbf685e82029dcef5cbd9b57ce25322d2d2d9cd79f92fe170d26a9c7: Status 404 returned error can't find the container with id da2f1ac8fbf685e82029dcef5cbd9b57ce25322d2d2d9cd79f92fe170d26a9c7 Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.258908 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9rrzg" event={"ID":"ee061325-4a4d-400a-91fb-9b7d2bf3b383","Type":"ContainerStarted","Data":"a1bf91b461854d460bc6d6cc27a79879d8749b5f4d7b8e7d907653b14c657f8f"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.265619 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dca6b4a9-bc25-4f86-be16-00ee98c04aeb","Type":"ContainerDied","Data":"e5ac6a721a7975d958915142a332b5d6d02d1d738bc297f5831888d7bf263ea6"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.265685 4711 scope.go:117] "RemoveContainer" 
containerID="29828b1f4594ec5c4aa1c903a603acc93c7098d1b20e90e06f235d45935b6c53" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.265728 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.268149 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" event={"ID":"6ad4bc45-298a-4ac6-877f-3022b22780cb","Type":"ContainerStarted","Data":"da2f1ac8fbf685e82029dcef5cbd9b57ce25322d2d2d9cd79f92fe170d26a9c7"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.272200 4711 generic.go:334] "Generic (PLEG): container finished" podID="09675ca4-9d45-4a8f-b26a-74aa1f3410e9" containerID="324c860ebe5858829a4c7bfa335ee69c77f7de48e37de40d0f850ac730609138" exitCode=0 Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.273021 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c164-account-create-update-pd8cn" event={"ID":"09675ca4-9d45-4a8f-b26a-74aa1f3410e9","Type":"ContainerDied","Data":"324c860ebe5858829a4c7bfa335ee69c77f7de48e37de40d0f850ac730609138"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.273058 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c164-account-create-update-pd8cn" event={"ID":"09675ca4-9d45-4a8f-b26a-74aa1f3410e9","Type":"ContainerStarted","Data":"d05c171fb19ceae686699fdf7a182aa71653c52c8f9864a3eb97a10b190ef7cd"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.278657 4711 generic.go:334] "Generic (PLEG): container finished" podID="250ba9fe-e717-439d-9616-857bc376a77e" containerID="df160c490f72fbe482eebaaad2345427901a9b41395aa3bdb0b7882db9767f05" exitCode=0 Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.278724 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-kmnxp" event={"ID":"250ba9fe-e717-439d-9616-857bc376a77e","Type":"ContainerDied","Data":"df160c490f72fbe482eebaaad2345427901a9b41395aa3bdb0b7882db9767f05"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.278793 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-kmnxp" event={"ID":"250ba9fe-e717-439d-9616-857bc376a77e","Type":"ContainerStarted","Data":"985b48829f2b7cab0a96cd2539d7b771f8823b00fa61baee20840257ee5ede8a"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.291759 4711 generic.go:334] "Generic (PLEG): container finished" podID="7aa9010c-513f-407e-add2-37b2f0399865" containerID="fc6ffb95647d2dd3c2afc5a21057a5b59271165f14a79d0162e5af29cf89e81f" exitCode=0 Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.291846 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-lmvv6" event={"ID":"7aa9010c-513f-407e-add2-37b2f0399865","Type":"ContainerDied","Data":"fc6ffb95647d2dd3c2afc5a21057a5b59271165f14a79d0162e5af29cf89e81f"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.291876 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-lmvv6" event={"ID":"7aa9010c-513f-407e-add2-37b2f0399865","Type":"ContainerStarted","Data":"4337e74d3392babfd1176b02861830831f5f0ba7dd4db82efd1e2f7f0d988d33"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.293841 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt" 
event={"ID":"f68d680a-2f6f-4227-a356-1005fa0084b6","Type":"ContainerStarted","Data":"6420d93f1b9d62db3170820a406edd6177e56b56d97649ea689c6fb62c2dba28"} Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.319553 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.321448 4711 scope.go:117] "RemoveContainer" containerID="66a4fb5c924f4b2787216fe82b68bfd3a76a79cdf81efb1eb1e13c1caae73da2" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.345854 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.347768 4711 scope.go:117] "RemoveContainer" containerID="af0babb079f05fd7ab1387d387b0a13ff896f1c1d4e1ce9de97bd36516b42883" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.366211 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:36 crc kubenswrapper[4711]: E1205 12:31:36.366695 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="sg-core" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.366716 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="sg-core" Dec 05 12:31:36 crc kubenswrapper[4711]: E1205 12:31:36.366731 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="ceilometer-central-agent" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.366738 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="ceilometer-central-agent" Dec 05 12:31:36 crc kubenswrapper[4711]: E1205 12:31:36.366753 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="ceilometer-notification-agent" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.366760 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="ceilometer-notification-agent" Dec 05 12:31:36 crc kubenswrapper[4711]: E1205 12:31:36.366782 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="proxy-httpd" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.366788 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="proxy-httpd" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.366966 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="ceilometer-central-agent" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.366982 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="sg-core" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.366997 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="ceilometer-notification-agent" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.367015 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" containerName="proxy-httpd" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.370195 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.375114 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.380928 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.381699 4711 scope.go:117] "RemoveContainer" containerID="32358d9b6e35a1287e4be73ef9652a0d17ca001d1394c71178e6a3161bb344f8" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.417100 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.559947 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.560047 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-log-httpd\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.560076 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-config-data\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.560891 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-run-httpd\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.560955 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.561204 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-scripts\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.561285 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqgbh\" (UniqueName: \"kubernetes.io/projected/b64b5e93-159f-42be-b8ff-515174839b73-kube-api-access-fqgbh\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.663583 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqgbh\" (UniqueName: 
\"kubernetes.io/projected/b64b5e93-159f-42be-b8ff-515174839b73-kube-api-access-fqgbh\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.663653 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.663698 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-log-httpd\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.663716 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-config-data\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.663772 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-run-httpd\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.663790 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.663851 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-scripts\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.665646 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-log-httpd\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.665694 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-run-httpd\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.674048 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.674273 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.674324 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-scripts\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.674759 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-config-data\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.683220 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqgbh\" (UniqueName: \"kubernetes.io/projected/b64b5e93-159f-42be-b8ff-515174839b73-kube-api-access-fqgbh\") pod \"ceilometer-0\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " pod="openstack/ceilometer-0" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.693730 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dca6b4a9-bc25-4f86-be16-00ee98c04aeb" path="/var/lib/kubelet/pods/dca6b4a9-bc25-4f86-be16-00ee98c04aeb/volumes" Dec 05 12:31:36 crc kubenswrapper[4711]: I1205 12:31:36.711146 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.203961 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.315168 4711 generic.go:334] "Generic (PLEG): container finished" podID="ee061325-4a4d-400a-91fb-9b7d2bf3b383" containerID="a5ca090c39c1f25b306924e19653964389568ec2f929b9fe6b9084d0518e9181" exitCode=0 Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.315584 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9rrzg" event={"ID":"ee061325-4a4d-400a-91fb-9b7d2bf3b383","Type":"ContainerDied","Data":"a5ca090c39c1f25b306924e19653964389568ec2f929b9fe6b9084d0518e9181"} Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.328840 4711 generic.go:334] "Generic (PLEG): container finished" podID="6ad4bc45-298a-4ac6-877f-3022b22780cb" containerID="3a8802f0d43c02de1793626e29f0e747e8a9ff352817d51546fe0cca8f683804" exitCode=0 Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.328978 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" event={"ID":"6ad4bc45-298a-4ac6-877f-3022b22780cb","Type":"ContainerDied","Data":"3a8802f0d43c02de1793626e29f0e747e8a9ff352817d51546fe0cca8f683804"} Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.353122 4711 generic.go:334] "Generic (PLEG): container finished" podID="f68d680a-2f6f-4227-a356-1005fa0084b6" containerID="fdaaf6a4a5793004b5f54a98b5c326a3baea5a2cafa3509ff5782ce334d48f36" exitCode=0 Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.353207 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt" event={"ID":"f68d680a-2f6f-4227-a356-1005fa0084b6","Type":"ContainerDied","Data":"fdaaf6a4a5793004b5f54a98b5c326a3baea5a2cafa3509ff5782ce334d48f36"} Dec 05 12:31:37 crc 
kubenswrapper[4711]: I1205 12:31:37.374061 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerStarted","Data":"5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191"} Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.381049 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerStarted","Data":"b26932add23e62787947f2be2138543eb0fe871b2d6a15ba693a24e58f95dbdb"} Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.730989 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-kmnxp" Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.896373 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/250ba9fe-e717-439d-9616-857bc376a77e-operator-scripts\") pod \"250ba9fe-e717-439d-9616-857bc376a77e\" (UID: \"250ba9fe-e717-439d-9616-857bc376a77e\") " Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.896924 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htqb9\" (UniqueName: \"kubernetes.io/projected/250ba9fe-e717-439d-9616-857bc376a77e-kube-api-access-htqb9\") pod \"250ba9fe-e717-439d-9616-857bc376a77e\" (UID: \"250ba9fe-e717-439d-9616-857bc376a77e\") " Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.899325 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/250ba9fe-e717-439d-9616-857bc376a77e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "250ba9fe-e717-439d-9616-857bc376a77e" (UID: "250ba9fe-e717-439d-9616-857bc376a77e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:37 crc kubenswrapper[4711]: I1205 12:31:37.919448 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/250ba9fe-e717-439d-9616-857bc376a77e-kube-api-access-htqb9" (OuterVolumeSpecName: "kube-api-access-htqb9") pod "250ba9fe-e717-439d-9616-857bc376a77e" (UID: "250ba9fe-e717-439d-9616-857bc376a77e"). InnerVolumeSpecName "kube-api-access-htqb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.006474 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htqb9\" (UniqueName: \"kubernetes.io/projected/250ba9fe-e717-439d-9616-857bc376a77e-kube-api-access-htqb9\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.006535 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/250ba9fe-e717-439d-9616-857bc376a77e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.057859 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c164-account-create-update-pd8cn" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.067793 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-lmvv6" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.209575 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-operator-scripts\") pod \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\" (UID: \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\") " Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.209835 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7aa9010c-513f-407e-add2-37b2f0399865-operator-scripts\") pod \"7aa9010c-513f-407e-add2-37b2f0399865\" (UID: \"7aa9010c-513f-407e-add2-37b2f0399865\") " Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.209959 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnj4t\" (UniqueName: \"kubernetes.io/projected/7aa9010c-513f-407e-add2-37b2f0399865-kube-api-access-nnj4t\") pod \"7aa9010c-513f-407e-add2-37b2f0399865\" (UID: \"7aa9010c-513f-407e-add2-37b2f0399865\") " Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.209989 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6kd6\" (UniqueName: \"kubernetes.io/projected/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-kube-api-access-z6kd6\") pod \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\" (UID: \"09675ca4-9d45-4a8f-b26a-74aa1f3410e9\") " Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.210316 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "09675ca4-9d45-4a8f-b26a-74aa1f3410e9" (UID: "09675ca4-9d45-4a8f-b26a-74aa1f3410e9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.210621 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7aa9010c-513f-407e-add2-37b2f0399865-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7aa9010c-513f-407e-add2-37b2f0399865" (UID: "7aa9010c-513f-407e-add2-37b2f0399865"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.211644 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.211700 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7aa9010c-513f-407e-add2-37b2f0399865-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.217671 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-kube-api-access-z6kd6" (OuterVolumeSpecName: "kube-api-access-z6kd6") pod "09675ca4-9d45-4a8f-b26a-74aa1f3410e9" (UID: "09675ca4-9d45-4a8f-b26a-74aa1f3410e9"). InnerVolumeSpecName "kube-api-access-z6kd6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.228754 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aa9010c-513f-407e-add2-37b2f0399865-kube-api-access-nnj4t" (OuterVolumeSpecName: "kube-api-access-nnj4t") pod "7aa9010c-513f-407e-add2-37b2f0399865" (UID: "7aa9010c-513f-407e-add2-37b2f0399865"). InnerVolumeSpecName "kube-api-access-nnj4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.313757 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnj4t\" (UniqueName: \"kubernetes.io/projected/7aa9010c-513f-407e-add2-37b2f0399865-kube-api-access-nnj4t\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.313807 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6kd6\" (UniqueName: \"kubernetes.io/projected/09675ca4-9d45-4a8f-b26a-74aa1f3410e9-kube-api-access-z6kd6\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.394256 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c164-account-create-update-pd8cn" event={"ID":"09675ca4-9d45-4a8f-b26a-74aa1f3410e9","Type":"ContainerDied","Data":"d05c171fb19ceae686699fdf7a182aa71653c52c8f9864a3eb97a10b190ef7cd"} Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.394310 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d05c171fb19ceae686699fdf7a182aa71653c52c8f9864a3eb97a10b190ef7cd" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.394277 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c164-account-create-update-pd8cn" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.396631 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-kmnxp" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.396622 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-kmnxp" event={"ID":"250ba9fe-e717-439d-9616-857bc376a77e","Type":"ContainerDied","Data":"985b48829f2b7cab0a96cd2539d7b771f8823b00fa61baee20840257ee5ede8a"} Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.396860 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="985b48829f2b7cab0a96cd2539d7b771f8823b00fa61baee20840257ee5ede8a" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.398926 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-lmvv6" event={"ID":"7aa9010c-513f-407e-add2-37b2f0399865","Type":"ContainerDied","Data":"4337e74d3392babfd1176b02861830831f5f0ba7dd4db82efd1e2f7f0d988d33"} Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.399022 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4337e74d3392babfd1176b02861830831f5f0ba7dd4db82efd1e2f7f0d988d33" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.398965 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-lmvv6" Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.401167 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerStarted","Data":"d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf"} Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.401214 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerStarted","Data":"1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77"} Dec 05 12:31:38 crc kubenswrapper[4711]: I1205 12:31:38.936947 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.041797 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn6gb\" (UniqueName: \"kubernetes.io/projected/f68d680a-2f6f-4227-a356-1005fa0084b6-kube-api-access-qn6gb\") pod \"f68d680a-2f6f-4227-a356-1005fa0084b6\" (UID: \"f68d680a-2f6f-4227-a356-1005fa0084b6\") " Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.042164 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68d680a-2f6f-4227-a356-1005fa0084b6-operator-scripts\") pod \"f68d680a-2f6f-4227-a356-1005fa0084b6\" (UID: \"f68d680a-2f6f-4227-a356-1005fa0084b6\") " Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.043641 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f68d680a-2f6f-4227-a356-1005fa0084b6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f68d680a-2f6f-4227-a356-1005fa0084b6" (UID: "f68d680a-2f6f-4227-a356-1005fa0084b6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.047273 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f68d680a-2f6f-4227-a356-1005fa0084b6-kube-api-access-qn6gb" (OuterVolumeSpecName: "kube-api-access-qn6gb") pod "f68d680a-2f6f-4227-a356-1005fa0084b6" (UID: "f68d680a-2f6f-4227-a356-1005fa0084b6"). InnerVolumeSpecName "kube-api-access-qn6gb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.144630 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn6gb\" (UniqueName: \"kubernetes.io/projected/f68d680a-2f6f-4227-a356-1005fa0084b6-kube-api-access-qn6gb\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.144669 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68d680a-2f6f-4227-a356-1005fa0084b6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.309700 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.317034 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-9rrzg" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.420693 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt" event={"ID":"f68d680a-2f6f-4227-a356-1005fa0084b6","Type":"ContainerDied","Data":"6420d93f1b9d62db3170820a406edd6177e56b56d97649ea689c6fb62c2dba28"} Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.420760 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6420d93f1b9d62db3170820a406edd6177e56b56d97649ea689c6fb62c2dba28" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.420760 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3e3b-account-create-update-tv8bt" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.427662 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerStarted","Data":"e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f"} Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.429403 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9rrzg" event={"ID":"ee061325-4a4d-400a-91fb-9b7d2bf3b383","Type":"ContainerDied","Data":"a1bf91b461854d460bc6d6cc27a79879d8749b5f4d7b8e7d907653b14c657f8f"} Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.429432 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1bf91b461854d460bc6d6cc27a79879d8749b5f4d7b8e7d907653b14c657f8f" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.429507 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9rrzg" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.431752 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" event={"ID":"6ad4bc45-298a-4ac6-877f-3022b22780cb","Type":"ContainerDied","Data":"da2f1ac8fbf685e82029dcef5cbd9b57ce25322d2d2d9cd79f92fe170d26a9c7"} Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.431782 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da2f1ac8fbf685e82029dcef5cbd9b57ce25322d2d2d9cd79f92fe170d26a9c7" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.431835 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-e1ed-account-create-update-tc6t7" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.449268 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee061325-4a4d-400a-91fb-9b7d2bf3b383-operator-scripts\") pod \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\" (UID: \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\") " Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.449345 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb46j\" (UniqueName: \"kubernetes.io/projected/6ad4bc45-298a-4ac6-877f-3022b22780cb-kube-api-access-tb46j\") pod \"6ad4bc45-298a-4ac6-877f-3022b22780cb\" (UID: \"6ad4bc45-298a-4ac6-877f-3022b22780cb\") " Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.449458 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mltxw\" (UniqueName: \"kubernetes.io/projected/ee061325-4a4d-400a-91fb-9b7d2bf3b383-kube-api-access-mltxw\") pod \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\" (UID: \"ee061325-4a4d-400a-91fb-9b7d2bf3b383\") " Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.449510 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ad4bc45-298a-4ac6-877f-3022b22780cb-operator-scripts\") pod \"6ad4bc45-298a-4ac6-877f-3022b22780cb\" (UID: \"6ad4bc45-298a-4ac6-877f-3022b22780cb\") " Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.450135 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ad4bc45-298a-4ac6-877f-3022b22780cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6ad4bc45-298a-4ac6-877f-3022b22780cb" (UID: "6ad4bc45-298a-4ac6-877f-3022b22780cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.450344 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee061325-4a4d-400a-91fb-9b7d2bf3b383-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ee061325-4a4d-400a-91fb-9b7d2bf3b383" (UID: "ee061325-4a4d-400a-91fb-9b7d2bf3b383"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.455910 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee061325-4a4d-400a-91fb-9b7d2bf3b383-kube-api-access-mltxw" (OuterVolumeSpecName: "kube-api-access-mltxw") pod "ee061325-4a4d-400a-91fb-9b7d2bf3b383" (UID: "ee061325-4a4d-400a-91fb-9b7d2bf3b383"). InnerVolumeSpecName "kube-api-access-mltxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.456192 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ad4bc45-298a-4ac6-877f-3022b22780cb-kube-api-access-tb46j" (OuterVolumeSpecName: "kube-api-access-tb46j") pod "6ad4bc45-298a-4ac6-877f-3022b22780cb" (UID: "6ad4bc45-298a-4ac6-877f-3022b22780cb"). InnerVolumeSpecName "kube-api-access-tb46j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.514788 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.551919 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee061325-4a4d-400a-91fb-9b7d2bf3b383-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.551959 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb46j\" (UniqueName: \"kubernetes.io/projected/6ad4bc45-298a-4ac6-877f-3022b22780cb-kube-api-access-tb46j\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.551973 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mltxw\" (UniqueName: \"kubernetes.io/projected/ee061325-4a4d-400a-91fb-9b7d2bf3b383-kube-api-access-mltxw\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:39 crc kubenswrapper[4711]: I1205 12:31:39.551991 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ad4bc45-298a-4ac6-877f-3022b22780cb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:40 crc kubenswrapper[4711]: E1205 12:31:40.667342 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fe97f8e_7a0e_40ef_8cab_3530224b79ee.slice/crio-5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fe97f8e_7a0e_40ef_8cab_3530224b79ee.slice/crio-conmon-5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191.scope\": RecentStats: unable to find data in memory cache]" Dec 05 12:31:40 crc kubenswrapper[4711]: I1205 12:31:40.850192 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:31:40 crc kubenswrapper[4711]: E1205 12:31:40.851570 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191 is running failed: container process not found" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 05 12:31:40 crc kubenswrapper[4711]: E1205 12:31:40.852054 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191 is running failed: container process not found" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 05 12:31:40 crc kubenswrapper[4711]: E1205 12:31:40.852328 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191 is running failed: container process not found" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" 
cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 05 12:31:40 crc kubenswrapper[4711]: E1205 12:31:40.852357 4711 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191 is running failed: container process not found" probeType="Startup" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.497437 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerDied","Data":"5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191"} Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.497367 4711 generic.go:334] "Generic (PLEG): container finished" podID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" exitCode=1 Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.497593 4711 scope.go:117] "RemoveContainer" containerID="b65755dbc6d82327fedde0c0fd0fb944cf512da5314b947b1cc76ddf16bb0d74" Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.498222 4711 scope.go:117] "RemoveContainer" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" Dec 05 12:31:41 crc kubenswrapper[4711]: E1205 12:31:41.498702 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.505929 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerStarted","Data":"57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8"} Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.506088 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="ceilometer-central-agent" containerID="cri-o://1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77" gracePeriod=30 Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.506168 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.506204 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="proxy-httpd" containerID="cri-o://57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8" gracePeriod=30 Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.506240 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="sg-core" containerID="cri-o://e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f" gracePeriod=30 Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.506288 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="ceilometer-notification-agent" containerID="cri-o://d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf" gracePeriod=30 Dec 05 12:31:41 crc kubenswrapper[4711]: I1205 12:31:41.566441 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.596854236 podStartE2EDuration="5.566421371s" podCreationTimestamp="2025-12-05 12:31:36 +0000 UTC" firstStartedPulling="2025-12-05 12:31:37.215333121 +0000 UTC m=+1342.799655451" lastFinishedPulling="2025-12-05 12:31:40.184900256 +0000 UTC m=+1345.769222586" observedRunningTime="2025-12-05 12:31:41.559340817 +0000 UTC m=+1347.143663147" watchObservedRunningTime="2025-12-05 12:31:41.566421371 +0000 UTC m=+1347.150743721" Dec 05 12:31:42 crc kubenswrapper[4711]: I1205 12:31:42.526988 4711 generic.go:334] "Generic (PLEG): container finished" podID="b64b5e93-159f-42be-b8ff-515174839b73" containerID="57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8" exitCode=0 Dec 05 12:31:42 crc kubenswrapper[4711]: I1205 12:31:42.527044 4711 generic.go:334] "Generic (PLEG): container finished" podID="b64b5e93-159f-42be-b8ff-515174839b73" containerID="e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f" exitCode=2 Dec 05 12:31:42 crc kubenswrapper[4711]: I1205 12:31:42.527052 4711 generic.go:334] "Generic (PLEG): container finished" podID="b64b5e93-159f-42be-b8ff-515174839b73" containerID="d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf" exitCode=0 Dec 05 12:31:42 crc kubenswrapper[4711]: I1205 12:31:42.527038 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerDied","Data":"57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8"} Dec 05 12:31:42 crc kubenswrapper[4711]: I1205 12:31:42.527094 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerDied","Data":"e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f"} Dec 05 12:31:42 crc kubenswrapper[4711]: I1205 12:31:42.527105 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerDied","Data":"d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf"} Dec 05 12:31:43 crc kubenswrapper[4711]: I1205 12:31:43.497685 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:43 crc kubenswrapper[4711]: I1205 12:31:43.497999 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerName="glance-log" containerID="cri-o://e83ece53a0980a332f4d448350613ae812f0ea913ff4f5bf0022fc050c72dd64" gracePeriod=30 Dec 05 12:31:43 crc kubenswrapper[4711]: I1205 12:31:43.498078 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerName="glance-httpd" containerID="cri-o://dc69e40bb76139c0eee1cd93626080cba7d3ef950c8213c9c1bfb230457117d0" gracePeriod=30 Dec 05 12:31:43 crc kubenswrapper[4711]: I1205 12:31:43.838711 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 12:31:44 crc kubenswrapper[4711]: 
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.557363 4711 generic.go:334] "Generic (PLEG): container finished" podID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerID="e83ece53a0980a332f4d448350613ae812f0ea913ff4f5bf0022fc050c72dd64" exitCode=143
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.557293 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29","Type":"ContainerDied","Data":"dc69e40bb76139c0eee1cd93626080cba7d3ef950c8213c9c1bfb230457117d0"}
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.557430 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29","Type":"ContainerDied","Data":"e83ece53a0980a332f4d448350613ae812f0ea913ff4f5bf0022fc050c72dd64"}
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.890763 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k9dtz"]
Dec 05 12:31:44 crc kubenswrapper[4711]: E1205 12:31:44.891406 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aa9010c-513f-407e-add2-37b2f0399865" containerName="mariadb-database-create"
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891418 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aa9010c-513f-407e-add2-37b2f0399865" containerName="mariadb-database-create"
Dec 05 12:31:44 crc kubenswrapper[4711]: E1205 12:31:44.891427 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="250ba9fe-e717-439d-9616-857bc376a77e" containerName="mariadb-database-create"
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891433 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="250ba9fe-e717-439d-9616-857bc376a77e" containerName="mariadb-database-create"
Dec 05 12:31:44 crc kubenswrapper[4711]: E1205 12:31:44.891461 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ad4bc45-298a-4ac6-877f-3022b22780cb" containerName="mariadb-account-create-update"
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891467 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ad4bc45-298a-4ac6-877f-3022b22780cb" containerName="mariadb-account-create-update"
Dec 05 12:31:44 crc kubenswrapper[4711]: E1205 12:31:44.891496 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09675ca4-9d45-4a8f-b26a-74aa1f3410e9" containerName="mariadb-account-create-update"
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891501 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="09675ca4-9d45-4a8f-b26a-74aa1f3410e9" containerName="mariadb-account-create-update"
Dec 05 12:31:44 crc kubenswrapper[4711]: E1205 12:31:44.891514 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee061325-4a4d-400a-91fb-9b7d2bf3b383" containerName="mariadb-database-create"
Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891519 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee061325-4a4d-400a-91fb-9b7d2bf3b383" containerName="mariadb-database-create"
Dec 05 12:31:44 crc kubenswrapper[4711]: E1205 12:31:44.891531 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f68d680a-2f6f-4227-a356-1005fa0084b6" containerName="mariadb-account-create-update"
containerName="mariadb-account-create-update" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891537 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f68d680a-2f6f-4227-a356-1005fa0084b6" containerName="mariadb-account-create-update" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891695 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ad4bc45-298a-4ac6-877f-3022b22780cb" containerName="mariadb-account-create-update" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891712 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aa9010c-513f-407e-add2-37b2f0399865" containerName="mariadb-database-create" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891721 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f68d680a-2f6f-4227-a356-1005fa0084b6" containerName="mariadb-account-create-update" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891740 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="250ba9fe-e717-439d-9616-857bc376a77e" containerName="mariadb-database-create" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891748 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="09675ca4-9d45-4a8f-b26a-74aa1f3410e9" containerName="mariadb-account-create-update" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.891758 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee061325-4a4d-400a-91fb-9b7d2bf3b383" containerName="mariadb-database-create" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.892919 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.898484 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.899864 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.905688 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mwm2w" Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.906334 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k9dtz"] Dec 05 12:31:44 crc kubenswrapper[4711]: I1205 12:31:44.956266 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.066688 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-combined-ca-bundle\") pod \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.066916 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8wdq\" (UniqueName: \"kubernetes.io/projected/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-kube-api-access-t8wdq\") pod \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.067874 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-httpd-run\") pod \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.067929 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.067958 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-config-data\") pod \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.068330 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-scripts\") pod \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.068541 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-logs\") pod \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.068626 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-public-tls-certs\") pod \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\" (UID: \"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29\") " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.069273 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-config-data\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.069549 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgwh6\" (UniqueName: \"kubernetes.io/projected/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-kube-api-access-wgwh6\") pod 
\"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.069589 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.069706 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-scripts\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.070073 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" (UID: "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.071734 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-logs" (OuterVolumeSpecName: "logs") pod "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" (UID: "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.075519 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-kube-api-access-t8wdq" (OuterVolumeSpecName: "kube-api-access-t8wdq") pod "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" (UID: "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29"). InnerVolumeSpecName "kube-api-access-t8wdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.091684 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-scripts" (OuterVolumeSpecName: "scripts") pod "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" (UID: "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.094972 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" (UID: "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.134557 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" (UID: "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.170997 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgwh6\" (UniqueName: \"kubernetes.io/projected/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-kube-api-access-wgwh6\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171046 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-scripts\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171188 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-config-data\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171253 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171263 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8wdq\" (UniqueName: \"kubernetes.io/projected/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-kube-api-access-t8wdq\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171274 4711 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171292 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171301 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.171311 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.179655 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-config-data\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " 
pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.179955 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-scripts\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.186158 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.192240 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgwh6\" (UniqueName: \"kubernetes.io/projected/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-kube-api-access-wgwh6\") pod \"nova-cell0-conductor-db-sync-k9dtz\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.229411 4711 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.244098 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" (UID: "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.244607 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-config-data" (OuterVolumeSpecName: "config-data") pod "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" (UID: "22e37a86-b630-47ee-ad7f-9c6cdcc2fe29"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.267436 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.276950 4711 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.276991 4711 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.277006 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.577783 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"22e37a86-b630-47ee-ad7f-9c6cdcc2fe29","Type":"ContainerDied","Data":"4eb22a93553d0a902c08f65ad2ad2d4e607420729d68b64b253587b29b89fffa"} Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.578328 4711 scope.go:117] "RemoveContainer" containerID="dc69e40bb76139c0eee1cd93626080cba7d3ef950c8213c9c1bfb230457117d0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.578378 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.626598 4711 scope.go:117] "RemoveContainer" containerID="e83ece53a0980a332f4d448350613ae812f0ea913ff4f5bf0022fc050c72dd64" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.638464 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.661515 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.672470 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:45 crc kubenswrapper[4711]: E1205 12:31:45.672948 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerName="glance-httpd" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.672967 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerName="glance-httpd" Dec 05 12:31:45 crc kubenswrapper[4711]: E1205 12:31:45.673006 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerName="glance-log" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.673013 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerName="glance-log" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.673190 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerName="glance-log" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.673210 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" containerName="glance-httpd" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.674321 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.676616 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.676854 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.682034 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.771947 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k9dtz"] Dec 05 12:31:45 crc kubenswrapper[4711]: W1205 12:31:45.772656 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6feb7c1f_2d77_4b1a_8c35_1b7afb48a8c8.slice/crio-c25901bdc495311f1474ca4234f51a9425cf4d473b2e39bd5915eee847ecf7a3 WatchSource:0}: Error finding container c25901bdc495311f1474ca4234f51a9425cf4d473b2e39bd5915eee847ecf7a3: Status 404 returned error can't find the container with id c25901bdc495311f1474ca4234f51a9425cf4d473b2e39bd5915eee847ecf7a3 Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.788437 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d707502b-314c-43db-8347-fd6db2e3280e-logs\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.788526 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-scripts\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.788581 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-config-data\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.788639 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.788739 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.788756 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d707502b-314c-43db-8347-fd6db2e3280e-httpd-run\") 
pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.788782 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.788814 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwls8\" (UniqueName: \"kubernetes.io/projected/d707502b-314c-43db-8347-fd6db2e3280e-kube-api-access-jwls8\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.890870 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-scripts\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.891159 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-config-data\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.891185 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.891243 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.891262 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d707502b-314c-43db-8347-fd6db2e3280e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.891287 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.891314 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwls8\" (UniqueName: \"kubernetes.io/projected/d707502b-314c-43db-8347-fd6db2e3280e-kube-api-access-jwls8\") pod 
\"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.891408 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d707502b-314c-43db-8347-fd6db2e3280e-logs\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.891848 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d707502b-314c-43db-8347-fd6db2e3280e-logs\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.892856 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d707502b-314c-43db-8347-fd6db2e3280e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.894987 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.921001 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-scripts\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.921443 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.922691 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwls8\" (UniqueName: \"kubernetes.io/projected/d707502b-314c-43db-8347-fd6db2e3280e-kube-api-access-jwls8\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.925008 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.925560 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d707502b-314c-43db-8347-fd6db2e3280e-config-data\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0" 
Dec 05 12:31:45 crc kubenswrapper[4711]: I1205 12:31:45.954345 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"d707502b-314c-43db-8347-fd6db2e3280e\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:31:46 crc kubenswrapper[4711]: I1205 12:31:46.002349 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 12:31:46 crc kubenswrapper[4711]: I1205 12:31:46.595158 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" event={"ID":"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8","Type":"ContainerStarted","Data":"c25901bdc495311f1474ca4234f51a9425cf4d473b2e39bd5915eee847ecf7a3"}
Dec 05 12:31:46 crc kubenswrapper[4711]: I1205 12:31:46.635167 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:31:46 crc kubenswrapper[4711]: W1205 12:31:46.641463 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd707502b_314c_43db_8347_fd6db2e3280e.slice/crio-8ce9c988cada67591e7e30a6f13a0488354c48223bd65e44524bc49a7ca26937 WatchSource:0}: Error finding container 8ce9c988cada67591e7e30a6f13a0488354c48223bd65e44524bc49a7ca26937: Status 404 returned error can't find the container with id 8ce9c988cada67591e7e30a6f13a0488354c48223bd65e44524bc49a7ca26937
Dec 05 12:31:46 crc kubenswrapper[4711]: I1205 12:31:46.694193 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22e37a86-b630-47ee-ad7f-9c6cdcc2fe29" path="/var/lib/kubelet/pods/22e37a86-b630-47ee-ad7f-9c6cdcc2fe29/volumes"
Dec 05 12:31:46 crc kubenswrapper[4711]: I1205 12:31:46.847166 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:31:46 crc kubenswrapper[4711]: I1205 12:31:46.847452 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerName="glance-log" containerID="cri-o://09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888" gracePeriod=30
Dec 05 12:31:46 crc kubenswrapper[4711]: I1205 12:31:46.847481 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerName="glance-httpd" containerID="cri-o://e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89" gracePeriod=30
Dec 05 12:31:47 crc kubenswrapper[4711]: I1205 12:31:47.610414 4711 generic.go:334] "Generic (PLEG): container finished" podID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerID="09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888" exitCode=143
Dec 05 12:31:47 crc kubenswrapper[4711]: I1205 12:31:47.610522 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d382a84e-a077-4aef-beac-c6be4347ebc3","Type":"ContainerDied","Data":"09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888"}
Dec 05 12:31:47 crc kubenswrapper[4711]: I1205 12:31:47.617238 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d707502b-314c-43db-8347-fd6db2e3280e","Type":"ContainerStarted","Data":"b3afbc2955af3b9d9b1e3d2a4c48a607a53f8de930120144953074120c9726f0"}
event={"ID":"d707502b-314c-43db-8347-fd6db2e3280e","Type":"ContainerStarted","Data":"b3afbc2955af3b9d9b1e3d2a4c48a607a53f8de930120144953074120c9726f0"} Dec 05 12:31:47 crc kubenswrapper[4711]: I1205 12:31:47.617284 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d707502b-314c-43db-8347-fd6db2e3280e","Type":"ContainerStarted","Data":"8ce9c988cada67591e7e30a6f13a0488354c48223bd65e44524bc49a7ca26937"} Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.300775 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.301437 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.301551 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.305433 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"39ed3b2d5509b3071d8cb2dc86698b520f2d0e9ed254bb880705f90278301c5c"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.305519 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://39ed3b2d5509b3071d8cb2dc86698b520f2d0e9ed254bb880705f90278301c5c" gracePeriod=600 Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.363499 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.447628 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-config-data\") pod \"d382a84e-a077-4aef-beac-c6be4347ebc3\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.447865 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-scripts\") pod \"d382a84e-a077-4aef-beac-c6be4347ebc3\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.448108 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-httpd-run\") pod \"d382a84e-a077-4aef-beac-c6be4347ebc3\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.448350 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-logs\") pod \"d382a84e-a077-4aef-beac-c6be4347ebc3\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.448544 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"d382a84e-a077-4aef-beac-c6be4347ebc3\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.448773 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gblp4\" (UniqueName: \"kubernetes.io/projected/d382a84e-a077-4aef-beac-c6be4347ebc3-kube-api-access-gblp4\") pod \"d382a84e-a077-4aef-beac-c6be4347ebc3\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.449119 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-internal-tls-certs\") pod \"d382a84e-a077-4aef-beac-c6be4347ebc3\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.449263 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-combined-ca-bundle\") pod \"d382a84e-a077-4aef-beac-c6be4347ebc3\" (UID: \"d382a84e-a077-4aef-beac-c6be4347ebc3\") " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.455918 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d382a84e-a077-4aef-beac-c6be4347ebc3" (UID: "d382a84e-a077-4aef-beac-c6be4347ebc3"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.456076 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-logs" (OuterVolumeSpecName: "logs") pod "d382a84e-a077-4aef-beac-c6be4347ebc3" (UID: "d382a84e-a077-4aef-beac-c6be4347ebc3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.459690 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d382a84e-a077-4aef-beac-c6be4347ebc3-kube-api-access-gblp4" (OuterVolumeSpecName: "kube-api-access-gblp4") pod "d382a84e-a077-4aef-beac-c6be4347ebc3" (UID: "d382a84e-a077-4aef-beac-c6be4347ebc3"). InnerVolumeSpecName "kube-api-access-gblp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.469294 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-scripts" (OuterVolumeSpecName: "scripts") pod "d382a84e-a077-4aef-beac-c6be4347ebc3" (UID: "d382a84e-a077-4aef-beac-c6be4347ebc3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.482452 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "d382a84e-a077-4aef-beac-c6be4347ebc3" (UID: "d382a84e-a077-4aef-beac-c6be4347ebc3"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.522652 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d382a84e-a077-4aef-beac-c6be4347ebc3" (UID: "d382a84e-a077-4aef-beac-c6be4347ebc3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.549979 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-config-data" (OuterVolumeSpecName: "config-data") pod "d382a84e-a077-4aef-beac-c6be4347ebc3" (UID: "d382a84e-a077-4aef-beac-c6be4347ebc3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.554549 4711 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.554677 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d382a84e-a077-4aef-beac-c6be4347ebc3-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.554783 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.554800 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gblp4\" (UniqueName: \"kubernetes.io/projected/d382a84e-a077-4aef-beac-c6be4347ebc3-kube-api-access-gblp4\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.554816 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.554827 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.554837 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.569654 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d382a84e-a077-4aef-beac-c6be4347ebc3" (UID: "d382a84e-a077-4aef-beac-c6be4347ebc3"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.595431 4711 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.647563 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="39ed3b2d5509b3071d8cb2dc86698b520f2d0e9ed254bb880705f90278301c5c" exitCode=0 Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.647635 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"39ed3b2d5509b3071d8cb2dc86698b520f2d0e9ed254bb880705f90278301c5c"} Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.647668 4711 scope.go:117] "RemoveContainer" containerID="165f762379e5d3244fd9c1d378fa831957f0f903eab36ae386a24b293865ce1b" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.652107 4711 generic.go:334] "Generic (PLEG): container finished" podID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerID="e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89" exitCode=0 Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.652170 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d382a84e-a077-4aef-beac-c6be4347ebc3","Type":"ContainerDied","Data":"e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89"} Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.652194 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d382a84e-a077-4aef-beac-c6be4347ebc3","Type":"ContainerDied","Data":"9224f080e03eb253a39da83a4e6d21fc34add1bb7c8bc88cea4cb808ebd6bdd6"} Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.652263 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.656221 4711 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.656577 4711 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d382a84e-a077-4aef-beac-c6be4347ebc3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.657227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d707502b-314c-43db-8347-fd6db2e3280e","Type":"ContainerStarted","Data":"5a35ff0dd13a0e176bfc705250ad9e4b258dbda8a0c0d14ce42de3e6fdf12fa0"} Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.701304 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.701275153 podStartE2EDuration="3.701275153s" podCreationTimestamp="2025-12-05 12:31:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:48.68812702 +0000 UTC m=+1354.272449360" watchObservedRunningTime="2025-12-05 12:31:48.701275153 +0000 UTC m=+1354.285597483" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.717078 4711 scope.go:117] "RemoveContainer" containerID="e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.752028 4711 scope.go:117] "RemoveContainer" containerID="09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.771647 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.788107 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.798170 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:48 crc kubenswrapper[4711]: E1205 12:31:48.799280 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerName="glance-log" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.799311 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerName="glance-log" Dec 05 12:31:48 crc kubenswrapper[4711]: E1205 12:31:48.799340 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerName="glance-httpd" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.799349 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerName="glance-httpd" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.799611 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerName="glance-httpd" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.799638 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" containerName="glance-log" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 
12:31:48.800899 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.802629 4711 scope.go:117] "RemoveContainer" containerID="e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89" Dec 05 12:31:48 crc kubenswrapper[4711]: E1205 12:31:48.803832 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89\": container with ID starting with e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89 not found: ID does not exist" containerID="e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.803874 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89"} err="failed to get container status \"e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89\": rpc error: code = NotFound desc = could not find container \"e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89\": container with ID starting with e74a6ced88f3bb79524a819b6235170c562de65884bdc2c0520adde3aeb81b89 not found: ID does not exist" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.803898 4711 scope.go:117] "RemoveContainer" containerID="09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.804229 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 12:31:48 crc kubenswrapper[4711]: E1205 12:31:48.804413 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888\": container with ID starting with 09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888 not found: ID does not exist" containerID="09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.804431 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888"} err="failed to get container status \"09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888\": rpc error: code = NotFound desc = could not find container \"09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888\": container with ID starting with 09025549ba634c0c418e3414b98ae34e78632817700d3c58dda073632b1f4888 not found: ID does not exist" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.804528 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.823430 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.863497 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc 
kubenswrapper[4711]: I1205 12:31:48.863752 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.863794 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.863824 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.863913 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl767\" (UniqueName: \"kubernetes.io/projected/8428680c-7087-4734-9868-462045c17653-kube-api-access-cl767\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.864011 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8428680c-7087-4734-9868-462045c17653-logs\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.864035 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.864062 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8428680c-7087-4734-9868-462045c17653-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.966015 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl767\" (UniqueName: \"kubernetes.io/projected/8428680c-7087-4734-9868-462045c17653-kube-api-access-cl767\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.966202 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8428680c-7087-4734-9868-462045c17653-logs\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.966228 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.966249 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8428680c-7087-4734-9868-462045c17653-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.966365 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.966413 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.966438 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.966497 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.967056 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8428680c-7087-4734-9868-462045c17653-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.967119 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8428680c-7087-4734-9868-462045c17653-logs\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.968149 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.972783 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.973204 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.975704 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.983557 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8428680c-7087-4734-9868-462045c17653-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:48 crc kubenswrapper[4711]: I1205 12:31:48.987521 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl767\" (UniqueName: \"kubernetes.io/projected/8428680c-7087-4734-9868-462045c17653-kube-api-access-cl767\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:49 crc kubenswrapper[4711]: I1205 12:31:49.041496 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"8428680c-7087-4734-9868-462045c17653\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:31:49 crc kubenswrapper[4711]: I1205 12:31:49.127949 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:49 crc kubenswrapper[4711]: I1205 12:31:49.697782 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0"} Dec 05 12:31:49 crc kubenswrapper[4711]: I1205 12:31:49.755192 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:31:49 crc kubenswrapper[4711]: W1205 12:31:49.763246 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8428680c_7087_4734_9868_462045c17653.slice/crio-166132c080ca8f0b511711b92a6849a0ed2183be5772dd72df07417445db31b4 WatchSource:0}: Error finding container 166132c080ca8f0b511711b92a6849a0ed2183be5772dd72df07417445db31b4: Status 404 returned error can't find the container with id 166132c080ca8f0b511711b92a6849a0ed2183be5772dd72df07417445db31b4 Dec 05 12:31:50 crc kubenswrapper[4711]: I1205 12:31:50.697117 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d382a84e-a077-4aef-beac-c6be4347ebc3" path="/var/lib/kubelet/pods/d382a84e-a077-4aef-beac-c6be4347ebc3/volumes" Dec 05 12:31:50 crc kubenswrapper[4711]: I1205 12:31:50.712047 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8428680c-7087-4734-9868-462045c17653","Type":"ContainerStarted","Data":"9ff782f543195521e3bdc2575d7891b6f9bc45e3c899059c96bec4f97b522a0f"} Dec 05 12:31:50 crc kubenswrapper[4711]: I1205 12:31:50.712098 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8428680c-7087-4734-9868-462045c17653","Type":"ContainerStarted","Data":"166132c080ca8f0b511711b92a6849a0ed2183be5772dd72df07417445db31b4"} Dec 05 12:31:50 crc kubenswrapper[4711]: I1205 12:31:50.849982 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:31:50 crc kubenswrapper[4711]: I1205 12:31:50.850379 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:31:50 crc kubenswrapper[4711]: I1205 12:31:50.850437 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:31:50 crc kubenswrapper[4711]: I1205 12:31:50.851565 4711 scope.go:117] "RemoveContainer" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" Dec 05 12:31:50 crc kubenswrapper[4711]: E1205 12:31:50.852101 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.587359 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.630969 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-combined-ca-bundle\") pod \"b64b5e93-159f-42be-b8ff-515174839b73\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.631084 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-run-httpd\") pod \"b64b5e93-159f-42be-b8ff-515174839b73\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.631783 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b64b5e93-159f-42be-b8ff-515174839b73" (UID: "b64b5e93-159f-42be-b8ff-515174839b73"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.631877 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-scripts\") pod \"b64b5e93-159f-42be-b8ff-515174839b73\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.631935 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-sg-core-conf-yaml\") pod \"b64b5e93-159f-42be-b8ff-515174839b73\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.631954 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-log-httpd\") pod \"b64b5e93-159f-42be-b8ff-515174839b73\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.632274 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqgbh\" (UniqueName: \"kubernetes.io/projected/b64b5e93-159f-42be-b8ff-515174839b73-kube-api-access-fqgbh\") pod \"b64b5e93-159f-42be-b8ff-515174839b73\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.632310 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-config-data\") pod \"b64b5e93-159f-42be-b8ff-515174839b73\" (UID: \"b64b5e93-159f-42be-b8ff-515174839b73\") " Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.632836 4711 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.633998 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b64b5e93-159f-42be-b8ff-515174839b73" (UID: "b64b5e93-159f-42be-b8ff-515174839b73"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.636635 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-scripts" (OuterVolumeSpecName: "scripts") pod "b64b5e93-159f-42be-b8ff-515174839b73" (UID: "b64b5e93-159f-42be-b8ff-515174839b73"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.636862 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b64b5e93-159f-42be-b8ff-515174839b73-kube-api-access-fqgbh" (OuterVolumeSpecName: "kube-api-access-fqgbh") pod "b64b5e93-159f-42be-b8ff-515174839b73" (UID: "b64b5e93-159f-42be-b8ff-515174839b73"). InnerVolumeSpecName "kube-api-access-fqgbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.661622 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b64b5e93-159f-42be-b8ff-515174839b73" (UID: "b64b5e93-159f-42be-b8ff-515174839b73"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.735278 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.742134 4711 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.742245 4711 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b64b5e93-159f-42be-b8ff-515174839b73-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.742331 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqgbh\" (UniqueName: \"kubernetes.io/projected/b64b5e93-159f-42be-b8ff-515174839b73-kube-api-access-fqgbh\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.752492 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8428680c-7087-4734-9868-462045c17653","Type":"ContainerStarted","Data":"b9d88066d98741e4a1c35f181611876357e71ccbf88e1f0acb781abf686514ff"} Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.755727 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b64b5e93-159f-42be-b8ff-515174839b73" (UID: "b64b5e93-159f-42be-b8ff-515174839b73"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.758266 4711 generic.go:334] "Generic (PLEG): container finished" podID="b64b5e93-159f-42be-b8ff-515174839b73" containerID="1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77" exitCode=0 Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.758351 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerDied","Data":"1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77"} Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.758431 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b64b5e93-159f-42be-b8ff-515174839b73","Type":"ContainerDied","Data":"b26932add23e62787947f2be2138543eb0fe871b2d6a15ba693a24e58f95dbdb"} Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.758457 4711 scope.go:117] "RemoveContainer" containerID="57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.758609 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.758883 4711 scope.go:117] "RemoveContainer" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" Dec 05 12:31:51 crc kubenswrapper[4711]: E1205 12:31:51.759146 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.773868 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.773844794 podStartE2EDuration="3.773844794s" podCreationTimestamp="2025-12-05 12:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:31:51.772274406 +0000 UTC m=+1357.356596756" watchObservedRunningTime="2025-12-05 12:31:51.773844794 +0000 UTC m=+1357.358167124" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.793877 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-config-data" (OuterVolumeSpecName: "config-data") pod "b64b5e93-159f-42be-b8ff-515174839b73" (UID: "b64b5e93-159f-42be-b8ff-515174839b73"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.844160 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:51 crc kubenswrapper[4711]: I1205 12:31:51.844194 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64b5e93-159f-42be-b8ff-515174839b73-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.105628 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.126550 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.136426 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:52 crc kubenswrapper[4711]: E1205 12:31:52.136886 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="ceilometer-central-agent" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.136906 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="ceilometer-central-agent" Dec 05 12:31:52 crc kubenswrapper[4711]: E1205 12:31:52.136921 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="ceilometer-notification-agent" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.136928 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="ceilometer-notification-agent" Dec 05 12:31:52 crc kubenswrapper[4711]: E1205 12:31:52.136950 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="sg-core" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.136956 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="sg-core" Dec 05 12:31:52 crc kubenswrapper[4711]: E1205 12:31:52.136971 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="proxy-httpd" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.136978 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="proxy-httpd" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.137189 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="ceilometer-central-agent" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.137210 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="ceilometer-notification-agent" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.137224 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="sg-core" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.137239 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b64b5e93-159f-42be-b8ff-515174839b73" containerName="proxy-httpd" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.138916 4711 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.141812 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.142006 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.151210 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.251462 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-log-httpd\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.251572 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-scripts\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.251625 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-config-data\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.251734 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-run-httpd\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.251789 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.251899 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.251944 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxgq7\" (UniqueName: \"kubernetes.io/projected/a418cfd1-7115-4cda-924a-78ae19aa6b2c-kube-api-access-qxgq7\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.353404 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-run-httpd\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 
12:31:52.353465 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.353555 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.353583 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxgq7\" (UniqueName: \"kubernetes.io/projected/a418cfd1-7115-4cda-924a-78ae19aa6b2c-kube-api-access-qxgq7\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.353674 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-log-httpd\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.353904 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-scripts\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.353938 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-config-data\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.354085 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-run-httpd\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.356927 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-log-httpd\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.364552 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.364565 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-scripts\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.364998 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.370554 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-config-data\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.373003 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxgq7\" (UniqueName: \"kubernetes.io/projected/a418cfd1-7115-4cda-924a-78ae19aa6b2c-kube-api-access-qxgq7\") pod \"ceilometer-0\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.453967 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:31:52 crc kubenswrapper[4711]: I1205 12:31:52.696640 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b64b5e93-159f-42be-b8ff-515174839b73" path="/var/lib/kubelet/pods/b64b5e93-159f-42be-b8ff-515174839b73/volumes" Dec 05 12:31:56 crc kubenswrapper[4711]: I1205 12:31:56.003043 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 12:31:56 crc kubenswrapper[4711]: I1205 12:31:56.003642 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 12:31:56 crc kubenswrapper[4711]: I1205 12:31:56.040420 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 12:31:56 crc kubenswrapper[4711]: I1205 12:31:56.057423 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 12:31:56 crc kubenswrapper[4711]: I1205 12:31:56.827487 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 12:31:56 crc kubenswrapper[4711]: I1205 12:31:56.827634 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 12:31:56 crc kubenswrapper[4711]: I1205 12:31:56.836151 4711 scope.go:117] "RemoveContainer" containerID="e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.086655 4711 scope.go:117] "RemoveContainer" containerID="d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.151214 4711 scope.go:117] "RemoveContainer" containerID="1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.200755 4711 scope.go:117] "RemoveContainer" containerID="57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8" Dec 05 12:31:57 crc kubenswrapper[4711]: E1205 12:31:57.201356 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8\": container with ID starting with 57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8 not found: ID 
does not exist" containerID="57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.201413 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8"} err="failed to get container status \"57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8\": rpc error: code = NotFound desc = could not find container \"57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8\": container with ID starting with 57e00cd10b477dafb8a2fab01711171c4c25093ea9f10d635144890ef73303d8 not found: ID does not exist" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.201439 4711 scope.go:117] "RemoveContainer" containerID="e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f" Dec 05 12:31:57 crc kubenswrapper[4711]: E1205 12:31:57.201756 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f\": container with ID starting with e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f not found: ID does not exist" containerID="e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.201803 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f"} err="failed to get container status \"e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f\": rpc error: code = NotFound desc = could not find container \"e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f\": container with ID starting with e08d5d107aa79d76a6aef6f30ab828be0e9f682f3a1e9857074d32c5135b9e3f not found: ID does not exist" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.201830 4711 scope.go:117] "RemoveContainer" containerID="d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf" Dec 05 12:31:57 crc kubenswrapper[4711]: E1205 12:31:57.202178 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf\": container with ID starting with d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf not found: ID does not exist" containerID="d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.202224 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf"} err="failed to get container status \"d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf\": rpc error: code = NotFound desc = could not find container \"d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf\": container with ID starting with d4ec5d689d9621041361607a3d15c5a83f8051fc7cb225cd44e4524e393fcedf not found: ID does not exist" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.202256 4711 scope.go:117] "RemoveContainer" containerID="1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77" Dec 05 12:31:57 crc kubenswrapper[4711]: E1205 12:31:57.202532 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77\": container with ID starting with 1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77 not found: ID does not exist" containerID="1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.202563 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77"} err="failed to get container status \"1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77\": rpc error: code = NotFound desc = could not find container \"1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77\": container with ID starting with 1bf8bbb04a668db5e68ff5f3d49927b5cf102bd8e619b65f7472f4520ebbef77 not found: ID does not exist" Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.659099 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:31:57 crc kubenswrapper[4711]: W1205 12:31:57.661487 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda418cfd1_7115_4cda_924a_78ae19aa6b2c.slice/crio-ffad5dad8a4e54372911b9cda2669b9744a4268d73435a2499ed0ae41473ab7d WatchSource:0}: Error finding container ffad5dad8a4e54372911b9cda2669b9744a4268d73435a2499ed0ae41473ab7d: Status 404 returned error can't find the container with id ffad5dad8a4e54372911b9cda2669b9744a4268d73435a2499ed0ae41473ab7d Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.842203 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" event={"ID":"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8","Type":"ContainerStarted","Data":"5de956d357f566a1ea7a690fd999f7c088c3eb2aeddf92d294ff98ed175a10cc"} Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.844130 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerStarted","Data":"ffad5dad8a4e54372911b9cda2669b9744a4268d73435a2499ed0ae41473ab7d"} Dec 05 12:31:57 crc kubenswrapper[4711]: I1205 12:31:57.864108 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" podStartSLOduration=2.438177773 podStartE2EDuration="13.864090925s" podCreationTimestamp="2025-12-05 12:31:44 +0000 UTC" firstStartedPulling="2025-12-05 12:31:45.776915225 +0000 UTC m=+1351.361237555" lastFinishedPulling="2025-12-05 12:31:57.202828377 +0000 UTC m=+1362.787150707" observedRunningTime="2025-12-05 12:31:57.857006081 +0000 UTC m=+1363.441328411" watchObservedRunningTime="2025-12-05 12:31:57.864090925 +0000 UTC m=+1363.448413255" Dec 05 12:31:58 crc kubenswrapper[4711]: I1205 12:31:58.861724 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerStarted","Data":"cf70938a9bc89b04a62ea58d1e2346227e800e8b23950427313a324a4d2ae8e8"} Dec 05 12:31:58 crc kubenswrapper[4711]: I1205 12:31:58.862202 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerStarted","Data":"45de59bf91dd62abb65359d74ad55431b25c20ef3c51a944ecd645b32d5da4df"} Dec 05 12:31:58 crc kubenswrapper[4711]: I1205 12:31:58.862006 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 
05 12:31:58 crc kubenswrapper[4711]: I1205 12:31:58.862231 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.128570 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.128878 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.181046 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.200359 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.498159 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.501517 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.874460 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerStarted","Data":"ce1fbc293dc7d317747417a534b17562a5869658a4c142fa345e304da98695d8"} Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.875081 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 12:31:59 crc kubenswrapper[4711]: I1205 12:31:59.875310 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 12:32:00 crc kubenswrapper[4711]: I1205 12:32:00.909355 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerStarted","Data":"dfe75bd85a432261ae1603afc32bd5d061830322dc37bfc97407d24dd3177d3e"} Dec 05 12:32:00 crc kubenswrapper[4711]: I1205 12:32:00.954078 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=6.610285823 podStartE2EDuration="8.954058544s" podCreationTimestamp="2025-12-05 12:31:52 +0000 UTC" firstStartedPulling="2025-12-05 12:31:57.664178459 +0000 UTC m=+1363.248500789" lastFinishedPulling="2025-12-05 12:32:00.00795118 +0000 UTC m=+1365.592273510" observedRunningTime="2025-12-05 12:32:00.937945148 +0000 UTC m=+1366.522267478" watchObservedRunningTime="2025-12-05 12:32:00.954058544 +0000 UTC m=+1366.538380874" Dec 05 12:32:01 crc kubenswrapper[4711]: I1205 12:32:01.920174 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:32:02 crc kubenswrapper[4711]: I1205 12:32:02.585227 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 12:32:02 crc kubenswrapper[4711]: I1205 12:32:02.585610 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:32:02 crc kubenswrapper[4711]: I1205 12:32:02.587982 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 12:32:02 crc kubenswrapper[4711]: I1205 12:32:02.685161 4711 scope.go:117] 
"RemoveContainer" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" Dec 05 12:32:02 crc kubenswrapper[4711]: E1205 12:32:02.685467 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.404893 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.405959 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="ceilometer-central-agent" containerID="cri-o://45de59bf91dd62abb65359d74ad55431b25c20ef3c51a944ecd645b32d5da4df" gracePeriod=30 Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.406073 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="proxy-httpd" containerID="cri-o://dfe75bd85a432261ae1603afc32bd5d061830322dc37bfc97407d24dd3177d3e" gracePeriod=30 Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.406052 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="ceilometer-notification-agent" containerID="cri-o://cf70938a9bc89b04a62ea58d1e2346227e800e8b23950427313a324a4d2ae8e8" gracePeriod=30 Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.406055 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="sg-core" containerID="cri-o://ce1fbc293dc7d317747417a534b17562a5869658a4c142fa345e304da98695d8" gracePeriod=30 Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.987845 4711 generic.go:334] "Generic (PLEG): container finished" podID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerID="dfe75bd85a432261ae1603afc32bd5d061830322dc37bfc97407d24dd3177d3e" exitCode=0 Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.988123 4711 generic.go:334] "Generic (PLEG): container finished" podID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerID="ce1fbc293dc7d317747417a534b17562a5869658a4c142fa345e304da98695d8" exitCode=2 Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.988132 4711 generic.go:334] "Generic (PLEG): container finished" podID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerID="45de59bf91dd62abb65359d74ad55431b25c20ef3c51a944ecd645b32d5da4df" exitCode=0 Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.987942 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerDied","Data":"dfe75bd85a432261ae1603afc32bd5d061830322dc37bfc97407d24dd3177d3e"} Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.988183 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerDied","Data":"ce1fbc293dc7d317747417a534b17562a5869658a4c142fa345e304da98695d8"} Dec 05 12:32:08 crc kubenswrapper[4711]: I1205 12:32:08.988202 4711 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerDied","Data":"45de59bf91dd62abb65359d74ad55431b25c20ef3c51a944ecd645b32d5da4df"} Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.010797 4711 generic.go:334] "Generic (PLEG): container finished" podID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerID="cf70938a9bc89b04a62ea58d1e2346227e800e8b23950427313a324a4d2ae8e8" exitCode=0 Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.010856 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerDied","Data":"cf70938a9bc89b04a62ea58d1e2346227e800e8b23950427313a324a4d2ae8e8"} Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.155143 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.322646 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-run-httpd\") pod \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.322790 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-combined-ca-bundle\") pod \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.322846 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-sg-core-conf-yaml\") pod \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.322916 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-scripts\") pod \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.323023 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-log-httpd\") pod \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.323188 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxgq7\" (UniqueName: \"kubernetes.io/projected/a418cfd1-7115-4cda-924a-78ae19aa6b2c-kube-api-access-qxgq7\") pod \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.323193 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a418cfd1-7115-4cda-924a-78ae19aa6b2c" (UID: "a418cfd1-7115-4cda-924a-78ae19aa6b2c"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.323303 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-config-data\") pod \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\" (UID: \"a418cfd1-7115-4cda-924a-78ae19aa6b2c\") " Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.323639 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a418cfd1-7115-4cda-924a-78ae19aa6b2c" (UID: "a418cfd1-7115-4cda-924a-78ae19aa6b2c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.323736 4711 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.323752 4711 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a418cfd1-7115-4cda-924a-78ae19aa6b2c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.330374 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a418cfd1-7115-4cda-924a-78ae19aa6b2c-kube-api-access-qxgq7" (OuterVolumeSpecName: "kube-api-access-qxgq7") pod "a418cfd1-7115-4cda-924a-78ae19aa6b2c" (UID: "a418cfd1-7115-4cda-924a-78ae19aa6b2c"). InnerVolumeSpecName "kube-api-access-qxgq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.342571 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-scripts" (OuterVolumeSpecName: "scripts") pod "a418cfd1-7115-4cda-924a-78ae19aa6b2c" (UID: "a418cfd1-7115-4cda-924a-78ae19aa6b2c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.352667 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a418cfd1-7115-4cda-924a-78ae19aa6b2c" (UID: "a418cfd1-7115-4cda-924a-78ae19aa6b2c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.416222 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a418cfd1-7115-4cda-924a-78ae19aa6b2c" (UID: "a418cfd1-7115-4cda-924a-78ae19aa6b2c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.425106 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.425146 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxgq7\" (UniqueName: \"kubernetes.io/projected/a418cfd1-7115-4cda-924a-78ae19aa6b2c-kube-api-access-qxgq7\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.425159 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.425170 4711 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.448048 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-config-data" (OuterVolumeSpecName: "config-data") pod "a418cfd1-7115-4cda-924a-78ae19aa6b2c" (UID: "a418cfd1-7115-4cda-924a-78ae19aa6b2c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:11 crc kubenswrapper[4711]: I1205 12:32:11.526956 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a418cfd1-7115-4cda-924a-78ae19aa6b2c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.025709 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a418cfd1-7115-4cda-924a-78ae19aa6b2c","Type":"ContainerDied","Data":"ffad5dad8a4e54372911b9cda2669b9744a4268d73435a2499ed0ae41473ab7d"} Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.025762 4711 scope.go:117] "RemoveContainer" containerID="dfe75bd85a432261ae1603afc32bd5d061830322dc37bfc97407d24dd3177d3e" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.025921 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.073900 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.079794 4711 scope.go:117] "RemoveContainer" containerID="ce1fbc293dc7d317747417a534b17562a5869658a4c142fa345e304da98695d8" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.085471 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104012 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:12 crc kubenswrapper[4711]: E1205 12:32:12.104570 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="ceilometer-central-agent" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104594 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="ceilometer-central-agent" Dec 05 12:32:12 crc kubenswrapper[4711]: E1205 12:32:12.104619 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="sg-core" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104627 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="sg-core" Dec 05 12:32:12 crc kubenswrapper[4711]: E1205 12:32:12.104655 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="ceilometer-notification-agent" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104663 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="ceilometer-notification-agent" Dec 05 12:32:12 crc kubenswrapper[4711]: E1205 12:32:12.104687 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="proxy-httpd" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104694 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="proxy-httpd" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104908 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="ceilometer-notification-agent" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104934 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="sg-core" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104959 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="ceilometer-central-agent" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.104974 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" containerName="proxy-httpd" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.108318 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.113055 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.114796 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.117141 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.120794 4711 scope.go:117] "RemoveContainer" containerID="cf70938a9bc89b04a62ea58d1e2346227e800e8b23950427313a324a4d2ae8e8" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.150607 4711 scope.go:117] "RemoveContainer" containerID="45de59bf91dd62abb65359d74ad55431b25c20ef3c51a944ecd645b32d5da4df" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.240791 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q99tv\" (UniqueName: \"kubernetes.io/projected/0670acaa-f6cd-4026-89d5-8615a3965179-kube-api-access-q99tv\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.240845 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-log-httpd\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.241622 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-run-httpd\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.242158 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-scripts\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.242496 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-config-data\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.242544 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.242570 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 
12:32:12.349222 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-config-data\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.349309 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.349342 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.349444 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q99tv\" (UniqueName: \"kubernetes.io/projected/0670acaa-f6cd-4026-89d5-8615a3965179-kube-api-access-q99tv\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.349485 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-log-httpd\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.349514 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-run-httpd\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.349677 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-scripts\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.357589 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-scripts\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.359374 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-log-httpd\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.359706 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-run-httpd\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.362813 4711 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-config-data\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.364763 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.368799 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.376597 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q99tv\" (UniqueName: \"kubernetes.io/projected/0670acaa-f6cd-4026-89d5-8615a3965179-kube-api-access-q99tv\") pod \"ceilometer-0\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.434288 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.697837 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a418cfd1-7115-4cda-924a-78ae19aa6b2c" path="/var/lib/kubelet/pods/a418cfd1-7115-4cda-924a-78ae19aa6b2c/volumes" Dec 05 12:32:12 crc kubenswrapper[4711]: I1205 12:32:12.920777 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:13 crc kubenswrapper[4711]: I1205 12:32:13.052643 4711 generic.go:334] "Generic (PLEG): container finished" podID="6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" containerID="5de956d357f566a1ea7a690fd999f7c088c3eb2aeddf92d294ff98ed175a10cc" exitCode=0 Dec 05 12:32:13 crc kubenswrapper[4711]: I1205 12:32:13.052708 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" event={"ID":"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8","Type":"ContainerDied","Data":"5de956d357f566a1ea7a690fd999f7c088c3eb2aeddf92d294ff98ed175a10cc"} Dec 05 12:32:13 crc kubenswrapper[4711]: I1205 12:32:13.054169 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerStarted","Data":"c1ff534f6297a8696d65dbee7586f59a8572172c9b5c351f17958e2040c9f741"} Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.067689 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerStarted","Data":"9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce"} Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.067749 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerStarted","Data":"8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde"} Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.491708 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.509826 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-scripts\") pod \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.510298 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-config-data\") pod \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.510434 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgwh6\" (UniqueName: \"kubernetes.io/projected/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-kube-api-access-wgwh6\") pod \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.510509 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-combined-ca-bundle\") pod \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\" (UID: \"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8\") " Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.515967 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-scripts" (OuterVolumeSpecName: "scripts") pod "6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" (UID: "6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.519195 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-kube-api-access-wgwh6" (OuterVolumeSpecName: "kube-api-access-wgwh6") pod "6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" (UID: "6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8"). InnerVolumeSpecName "kube-api-access-wgwh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.556172 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" (UID: "6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.597591 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-config-data" (OuterVolumeSpecName: "config-data") pod "6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" (UID: "6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.612652 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.612693 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.612707 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:14 crc kubenswrapper[4711]: I1205 12:32:14.612742 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgwh6\" (UniqueName: \"kubernetes.io/projected/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8-kube-api-access-wgwh6\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.088227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" event={"ID":"6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8","Type":"ContainerDied","Data":"c25901bdc495311f1474ca4234f51a9425cf4d473b2e39bd5915eee847ecf7a3"} Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.088269 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k9dtz" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.088276 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c25901bdc495311f1474ca4234f51a9425cf4d473b2e39bd5915eee847ecf7a3" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.091396 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerStarted","Data":"eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6"} Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.212665 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 12:32:15 crc kubenswrapper[4711]: E1205 12:32:15.215801 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" containerName="nova-cell0-conductor-db-sync" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.215845 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" containerName="nova-cell0-conductor-db-sync" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.216171 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" containerName="nova-cell0-conductor-db-sync" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.217060 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.219943 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.225027 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.233832 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mwm2w" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.326586 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.326925 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fpm4\" (UniqueName: \"kubernetes.io/projected/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-kube-api-access-9fpm4\") pod \"nova-cell0-conductor-0\" (UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.326980 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.428372 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.428556 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.428585 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fpm4\" (UniqueName: \"kubernetes.io/projected/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-kube-api-access-9fpm4\") pod \"nova-cell0-conductor-0\" (UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.432118 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.434374 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.449017 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fpm4\" (UniqueName: \"kubernetes.io/projected/728a3b65-c3e7-4ebf-b8b7-b1483643e4de-kube-api-access-9fpm4\") pod \"nova-cell0-conductor-0\" (UID: \"728a3b65-c3e7-4ebf-b8b7-b1483643e4de\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.553048 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:15 crc kubenswrapper[4711]: I1205 12:32:15.995329 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 12:32:15 crc kubenswrapper[4711]: W1205 12:32:15.995705 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod728a3b65_c3e7_4ebf_b8b7_b1483643e4de.slice/crio-6195c2cb9482c6158afb54be8916edeaa2e1fadb031783aebfd2dbd85a1df85c WatchSource:0}: Error finding container 6195c2cb9482c6158afb54be8916edeaa2e1fadb031783aebfd2dbd85a1df85c: Status 404 returned error can't find the container with id 6195c2cb9482c6158afb54be8916edeaa2e1fadb031783aebfd2dbd85a1df85c Dec 05 12:32:16 crc kubenswrapper[4711]: I1205 12:32:16.103357 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"728a3b65-c3e7-4ebf-b8b7-b1483643e4de","Type":"ContainerStarted","Data":"6195c2cb9482c6158afb54be8916edeaa2e1fadb031783aebfd2dbd85a1df85c"} Dec 05 12:32:16 crc kubenswrapper[4711]: I1205 12:32:16.114844 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerStarted","Data":"886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7"} Dec 05 12:32:16 crc kubenswrapper[4711]: I1205 12:32:16.114978 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:32:16 crc kubenswrapper[4711]: I1205 12:32:16.146008 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.759483877 podStartE2EDuration="4.145989009s" podCreationTimestamp="2025-12-05 12:32:12 +0000 UTC" firstStartedPulling="2025-12-05 12:32:12.927000227 +0000 UTC m=+1378.511322567" lastFinishedPulling="2025-12-05 12:32:15.313505369 +0000 UTC m=+1380.897827699" observedRunningTime="2025-12-05 12:32:16.131947824 +0000 UTC m=+1381.716270154" watchObservedRunningTime="2025-12-05 12:32:16.145989009 +0000 UTC m=+1381.730311339" Dec 05 12:32:16 crc kubenswrapper[4711]: I1205 12:32:16.257759 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:17 crc kubenswrapper[4711]: I1205 12:32:17.125605 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"728a3b65-c3e7-4ebf-b8b7-b1483643e4de","Type":"ContainerStarted","Data":"6dd54272d76a7bc04f861add8def29c2f7abdd34327633d702d3c47354f834a7"} Dec 05 12:32:17 crc kubenswrapper[4711]: I1205 12:32:17.149758 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.14972853 podStartE2EDuration="2.14972853s" podCreationTimestamp="2025-12-05 12:32:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:32:17.13955274 +0000 UTC m=+1382.723875120" watchObservedRunningTime="2025-12-05 12:32:17.14972853 +0000 UTC m=+1382.734050880" Dec 05 12:32:17 crc kubenswrapper[4711]: I1205 12:32:17.683372 4711 scope.go:117] "RemoveContainer" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" Dec 05 12:32:17 crc kubenswrapper[4711]: E1205 12:32:17.683628 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(2fe97f8e-7a0e-40ef-8cab-3530224b79ee)\"" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" Dec 05 12:32:18 crc kubenswrapper[4711]: I1205 12:32:18.133692 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="ceilometer-central-agent" containerID="cri-o://8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde" gracePeriod=30 Dec 05 12:32:18 crc kubenswrapper[4711]: I1205 12:32:18.134546 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="sg-core" containerID="cri-o://eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6" gracePeriod=30 Dec 05 12:32:18 crc kubenswrapper[4711]: I1205 12:32:18.134563 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="proxy-httpd" containerID="cri-o://886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7" gracePeriod=30 Dec 05 12:32:18 crc kubenswrapper[4711]: I1205 12:32:18.134727 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="ceilometer-notification-agent" containerID="cri-o://9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce" gracePeriod=30 Dec 05 12:32:18 crc kubenswrapper[4711]: I1205 12:32:18.134799 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:19 crc kubenswrapper[4711]: I1205 12:32:19.161947 4711 generic.go:334] "Generic (PLEG): container finished" podID="0670acaa-f6cd-4026-89d5-8615a3965179" containerID="886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7" exitCode=0 Dec 05 12:32:19 crc kubenswrapper[4711]: I1205 12:32:19.163343 4711 generic.go:334] "Generic (PLEG): container finished" podID="0670acaa-f6cd-4026-89d5-8615a3965179" containerID="eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6" exitCode=2 Dec 05 12:32:19 crc kubenswrapper[4711]: I1205 12:32:19.163480 4711 generic.go:334] "Generic (PLEG): container finished" podID="0670acaa-f6cd-4026-89d5-8615a3965179" containerID="9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce" exitCode=0 Dec 05 12:32:19 crc kubenswrapper[4711]: I1205 12:32:19.162031 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerDied","Data":"886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7"} Dec 05 12:32:19 crc kubenswrapper[4711]: I1205 12:32:19.163674 4711 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerDied","Data":"eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6"} Dec 05 12:32:19 crc kubenswrapper[4711]: I1205 12:32:19.163695 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerDied","Data":"9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce"} Dec 05 12:32:21 crc kubenswrapper[4711]: I1205 12:32:21.919613 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.061204 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-config-data\") pod \"0670acaa-f6cd-4026-89d5-8615a3965179\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.061292 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-sg-core-conf-yaml\") pod \"0670acaa-f6cd-4026-89d5-8615a3965179\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.061315 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q99tv\" (UniqueName: \"kubernetes.io/projected/0670acaa-f6cd-4026-89d5-8615a3965179-kube-api-access-q99tv\") pod \"0670acaa-f6cd-4026-89d5-8615a3965179\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.061353 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-combined-ca-bundle\") pod \"0670acaa-f6cd-4026-89d5-8615a3965179\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.061494 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-log-httpd\") pod \"0670acaa-f6cd-4026-89d5-8615a3965179\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.061562 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-run-httpd\") pod \"0670acaa-f6cd-4026-89d5-8615a3965179\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.061604 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-scripts\") pod \"0670acaa-f6cd-4026-89d5-8615a3965179\" (UID: \"0670acaa-f6cd-4026-89d5-8615a3965179\") " Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.062580 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0670acaa-f6cd-4026-89d5-8615a3965179" (UID: "0670acaa-f6cd-4026-89d5-8615a3965179"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.062687 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0670acaa-f6cd-4026-89d5-8615a3965179" (UID: "0670acaa-f6cd-4026-89d5-8615a3965179"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.067311 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0670acaa-f6cd-4026-89d5-8615a3965179-kube-api-access-q99tv" (OuterVolumeSpecName: "kube-api-access-q99tv") pod "0670acaa-f6cd-4026-89d5-8615a3965179" (UID: "0670acaa-f6cd-4026-89d5-8615a3965179"). InnerVolumeSpecName "kube-api-access-q99tv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.067933 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-scripts" (OuterVolumeSpecName: "scripts") pod "0670acaa-f6cd-4026-89d5-8615a3965179" (UID: "0670acaa-f6cd-4026-89d5-8615a3965179"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.091842 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0670acaa-f6cd-4026-89d5-8615a3965179" (UID: "0670acaa-f6cd-4026-89d5-8615a3965179"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.145515 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0670acaa-f6cd-4026-89d5-8615a3965179" (UID: "0670acaa-f6cd-4026-89d5-8615a3965179"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.156644 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-config-data" (OuterVolumeSpecName: "config-data") pod "0670acaa-f6cd-4026-89d5-8615a3965179" (UID: "0670acaa-f6cd-4026-89d5-8615a3965179"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.164214 4711 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.164248 4711 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0670acaa-f6cd-4026-89d5-8615a3965179-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.164259 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.164271 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.164279 4711 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.164291 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q99tv\" (UniqueName: \"kubernetes.io/projected/0670acaa-f6cd-4026-89d5-8615a3965179-kube-api-access-q99tv\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.164299 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0670acaa-f6cd-4026-89d5-8615a3965179-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.196693 4711 generic.go:334] "Generic (PLEG): container finished" podID="0670acaa-f6cd-4026-89d5-8615a3965179" containerID="8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde" exitCode=0 Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.196741 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerDied","Data":"8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde"} Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.196770 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0670acaa-f6cd-4026-89d5-8615a3965179","Type":"ContainerDied","Data":"c1ff534f6297a8696d65dbee7586f59a8572172c9b5c351f17958e2040c9f741"} Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.196806 4711 scope.go:117] "RemoveContainer" containerID="886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.196814 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.236395 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.240055 4711 scope.go:117] "RemoveContainer" containerID="eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.247596 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.277016 4711 scope.go:117] "RemoveContainer" containerID="9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.278374 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:22 crc kubenswrapper[4711]: E1205 12:32:22.278908 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="proxy-httpd" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.278931 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="proxy-httpd" Dec 05 12:32:22 crc kubenswrapper[4711]: E1205 12:32:22.278957 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="ceilometer-central-agent" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.278968 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="ceilometer-central-agent" Dec 05 12:32:22 crc kubenswrapper[4711]: E1205 12:32:22.278988 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="sg-core" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.278998 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="sg-core" Dec 05 12:32:22 crc kubenswrapper[4711]: E1205 12:32:22.279038 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="ceilometer-notification-agent" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.279049 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="ceilometer-notification-agent" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.279560 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="ceilometer-central-agent" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.279590 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="ceilometer-notification-agent" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.279603 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="proxy-httpd" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.279620 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" containerName="sg-core" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.281990 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.284549 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.284724 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.293152 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.334790 4711 scope.go:117] "RemoveContainer" containerID="8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.353966 4711 scope.go:117] "RemoveContainer" containerID="886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7" Dec 05 12:32:22 crc kubenswrapper[4711]: E1205 12:32:22.354329 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7\": container with ID starting with 886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7 not found: ID does not exist" containerID="886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.354360 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7"} err="failed to get container status \"886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7\": rpc error: code = NotFound desc = could not find container \"886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7\": container with ID starting with 886c3bdd46f748a56e32c349455c9d0c043776f96b1c7f179b047e434d755be7 not found: ID does not exist" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.354380 4711 scope.go:117] "RemoveContainer" containerID="eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6" Dec 05 12:32:22 crc kubenswrapper[4711]: E1205 12:32:22.354713 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6\": container with ID starting with eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6 not found: ID does not exist" containerID="eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.354770 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6"} err="failed to get container status \"eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6\": rpc error: code = NotFound desc = could not find container \"eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6\": container with ID starting with eca81cbaacdaacedf1d4f64f1b0298cb01e302261e5e63b9d48ee2fcf67783f6 not found: ID does not exist" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.354787 4711 scope.go:117] "RemoveContainer" containerID="9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce" Dec 05 12:32:22 crc kubenswrapper[4711]: E1205 12:32:22.355085 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce\": container with ID starting with 9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce not found: ID does not exist" containerID="9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.355121 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce"} err="failed to get container status \"9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce\": rpc error: code = NotFound desc = could not find container \"9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce\": container with ID starting with 9cb061ca513143c230194e948b0b6ea13f4034aa34d4958060d5b3674ee133ce not found: ID does not exist" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.355148 4711 scope.go:117] "RemoveContainer" containerID="8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde" Dec 05 12:32:22 crc kubenswrapper[4711]: E1205 12:32:22.355476 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde\": container with ID starting with 8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde not found: ID does not exist" containerID="8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.355500 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde"} err="failed to get container status \"8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde\": rpc error: code = NotFound desc = could not find container \"8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde\": container with ID starting with 8d1463fe9977b38d7385031187ecfdd56e42d339e7ab099b2f0da448fc2b0dde not found: ID does not exist" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.471704 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-log-httpd\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.471767 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.471846 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.471892 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-run-httpd\") pod \"ceilometer-0\" (UID: 
\"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.472013 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-config-data\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.472457 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-scripts\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.472612 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsfsn\" (UniqueName: \"kubernetes.io/projected/01830ceb-a989-4c58-ad76-b63f213e6a4b-kube-api-access-qsfsn\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.574958 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-scripts\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.575034 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsfsn\" (UniqueName: \"kubernetes.io/projected/01830ceb-a989-4c58-ad76-b63f213e6a4b-kube-api-access-qsfsn\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.575098 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-log-httpd\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.575133 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.575186 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.575214 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-run-httpd\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.575240 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-config-data\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.575739 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-log-httpd\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.575809 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-run-httpd\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.580556 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-scripts\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.580657 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-config-data\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.581408 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.581821 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.598928 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsfsn\" (UniqueName: \"kubernetes.io/projected/01830ceb-a989-4c58-ad76-b63f213e6a4b-kube-api-access-qsfsn\") pod \"ceilometer-0\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.600504 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:32:22 crc kubenswrapper[4711]: I1205 12:32:22.694988 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0670acaa-f6cd-4026-89d5-8615a3965179" path="/var/lib/kubelet/pods/0670acaa-f6cd-4026-89d5-8615a3965179/volumes" Dec 05 12:32:23 crc kubenswrapper[4711]: I1205 12:32:23.098632 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:23 crc kubenswrapper[4711]: I1205 12:32:23.209163 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerStarted","Data":"08abb37f2655fb7799c47b159456aa19f3eab8d2ed2c5b2c1df16b2e8bcf25b8"} Dec 05 12:32:24 crc kubenswrapper[4711]: I1205 12:32:24.223154 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerStarted","Data":"08c8ecfe5d4ea1215468a034aca450c8773d3fefb21f54a43c21282f84436c65"} Dec 05 12:32:24 crc kubenswrapper[4711]: I1205 12:32:24.223621 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerStarted","Data":"679e00bf4c5ad46d64af8c6314438a0a404db3fa1b8428a940ce07b7590606f2"} Dec 05 12:32:25 crc kubenswrapper[4711]: I1205 12:32:25.237775 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerStarted","Data":"b55b9b38d2838b2d27e9280f494ec8d102c5988a8c4904e67f527c4fb20805fd"} Dec 05 12:32:25 crc kubenswrapper[4711]: I1205 12:32:25.588628 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.061748 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-2hnmx"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.063111 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.067354 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.067571 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.076140 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-2hnmx"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.229015 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.231755 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.235591 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.247787 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9njj7\" (UniqueName: \"kubernetes.io/projected/a5592302-99d4-4e3c-948b-b9bccef58e6f-kube-api-access-9njj7\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.248973 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.249369 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-scripts\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.249741 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-config-data\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.263326 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.324995 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.327513 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.336758 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.381315 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.381397 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-config-data\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.385036 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-config-data\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.385118 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-config-data\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.385263 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9njj7\" (UniqueName: \"kubernetes.io/projected/a5592302-99d4-4e3c-948b-b9bccef58e6f-kube-api-access-9njj7\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.386915 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.387011 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.387124 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g822z\" (UniqueName: \"kubernetes.io/projected/740e9907-2689-4775-bf06-78517da985f7-kube-api-access-g822z\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.402765 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-config-data\") pod 
\"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.405624 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-scripts\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.405977 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pqnx\" (UniqueName: \"kubernetes.io/projected/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-kube-api-access-9pqnx\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.406356 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-logs\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.406459 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/740e9907-2689-4775-bf06-78517da985f7-logs\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.421052 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.426572 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.436052 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-scripts\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.463504 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9njj7\" (UniqueName: \"kubernetes.io/projected/a5592302-99d4-4e3c-948b-b9bccef58e6f-kube-api-access-9njj7\") pod \"nova-cell0-cell-mapping-2hnmx\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.512657 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pqnx\" (UniqueName: \"kubernetes.io/projected/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-kube-api-access-9pqnx\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.512722 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-logs\") pod \"nova-api-0\" (UID: 
\"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.512772 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/740e9907-2689-4775-bf06-78517da985f7-logs\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.512811 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.512830 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-config-data\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.512886 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-config-data\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.512975 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.513047 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g822z\" (UniqueName: \"kubernetes.io/projected/740e9907-2689-4775-bf06-78517da985f7-kube-api-access-g822z\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.519000 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-config-data\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.519318 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-logs\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.519912 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/740e9907-2689-4775-bf06-78517da985f7-logs\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.520710 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " 
pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.525817 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.527374 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-config-data\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.538417 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.538879 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pqnx\" (UniqueName: \"kubernetes.io/projected/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-kube-api-access-9pqnx\") pod \"nova-api-0\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.540147 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.543674 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g822z\" (UniqueName: \"kubernetes.io/projected/740e9907-2689-4775-bf06-78517da985f7-kube-api-access-g822z\") pod \"nova-metadata-0\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.545639 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.554808 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.586742 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.597758 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c7cddbf6c-zpqgg"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.599779 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.614798 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-config-data\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.614939 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.614976 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxh9b\" (UniqueName: \"kubernetes.io/projected/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-kube-api-access-xxh9b\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.637565 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.641233 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.646818 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.661875 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c7cddbf6c-zpqgg"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.687890 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.687922 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.688409 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.717996 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxh9b\" (UniqueName: \"kubernetes.io/projected/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-kube-api-access-xxh9b\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718076 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718124 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwfmf\" (UniqueName: \"kubernetes.io/projected/3a092676-35a7-435a-a7d2-06dd27c07809-kube-api-access-wwfmf\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718160 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-svc\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718202 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718230 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-config-data\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718283 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-nb\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718347 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-config\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718440 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718469 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-swift-storage-0\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718503 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.718526 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwhbf\" (UniqueName: \"kubernetes.io/projected/907e0f25-2faa-4e14-a121-a5b31b285b7c-kube-api-access-hwhbf\") pod \"nova-cell1-novncproxy-0\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.727952 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-config-data\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.734757 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.745242 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxh9b\" (UniqueName: \"kubernetes.io/projected/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-kube-api-access-xxh9b\") pod \"nova-scheduler-0\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.826655 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.826745 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwfmf\" (UniqueName: \"kubernetes.io/projected/3a092676-35a7-435a-a7d2-06dd27c07809-kube-api-access-wwfmf\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.826788 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-svc\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc 
kubenswrapper[4711]: I1205 12:32:26.826857 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.826941 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-nb\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.827006 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-config\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.827074 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.827100 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-swift-storage-0\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.827138 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwhbf\" (UniqueName: \"kubernetes.io/projected/907e0f25-2faa-4e14-a121-a5b31b285b7c-kube-api-access-hwhbf\") pod \"nova-cell1-novncproxy-0\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.828878 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-sb\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.829990 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-swift-storage-0\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.831352 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-nb\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.832075 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-svc\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.834028 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-config\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.879136 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.919417 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.938767 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.938782 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwfmf\" (UniqueName: \"kubernetes.io/projected/3a092676-35a7-435a-a7d2-06dd27c07809-kube-api-access-wwfmf\") pod \"dnsmasq-dns-6c7cddbf6c-zpqgg\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.939372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwhbf\" (UniqueName: \"kubernetes.io/projected/907e0f25-2faa-4e14-a121-a5b31b285b7c-kube-api-access-hwhbf\") pod \"nova-cell1-novncproxy-0\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:26 crc kubenswrapper[4711]: I1205 12:32:26.965669 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.230528 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.231599 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.276718 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerStarted","Data":"d17cff6afafd29fcff06414470bf216f09404c94134124f5ded90fe02df6b87a"} Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.277492 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.292623 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5ac07289-6924-47a2-8e4e-1c2b5d7d915c","Type":"ContainerStarted","Data":"a86100d2f4eafbbb83ad2bfc2675b6ffee044ec4b72daba742a0a3b0559377d2"} Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.331085 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.551987975 podStartE2EDuration="5.3310632s" podCreationTimestamp="2025-12-05 12:32:22 +0000 UTC" firstStartedPulling="2025-12-05 12:32:23.103704172 +0000 UTC m=+1388.688026502" lastFinishedPulling="2025-12-05 12:32:25.882779397 +0000 UTC m=+1391.467101727" observedRunningTime="2025-12-05 12:32:27.304112297 +0000 UTC m=+1392.888434627" watchObservedRunningTime="2025-12-05 12:32:27.3310632 +0000 UTC m=+1392.915385530" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.471636 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-2hnmx"] Dec 05 12:32:27 crc kubenswrapper[4711]: W1205 12:32:27.480995 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5592302_99d4_4e3c_948b_b9bccef58e6f.slice/crio-a19faa0dbebf0f7b8be954299bbab7385e6759c5e26b00a9ec3a5b0b89c7c94e WatchSource:0}: Error finding container a19faa0dbebf0f7b8be954299bbab7385e6759c5e26b00a9ec3a5b0b89c7c94e: Status 404 returned error can't find the container with id a19faa0dbebf0f7b8be954299bbab7385e6759c5e26b00a9ec3a5b0b89c7c94e Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.598812 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sgvbg"] Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.601070 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.605990 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.606200 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.612984 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sgvbg"] Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.649723 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5rxf\" (UniqueName: \"kubernetes.io/projected/001b7802-4a74-4409-8667-128892193313-kube-api-access-x5rxf\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.650059 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-scripts\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.650215 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.650293 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-config-data\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.704442 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:32:27 crc kubenswrapper[4711]: W1205 12:32:27.722953 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod740e9907_2689_4775_bf06_78517da985f7.slice/crio-96fa96bc44cbc86d605b7c7ebcdb3a343cac6b1d7f0daa964f999069663fbd90 WatchSource:0}: Error finding container 96fa96bc44cbc86d605b7c7ebcdb3a343cac6b1d7f0daa964f999069663fbd90: Status 404 returned error can't find the container with id 96fa96bc44cbc86d605b7c7ebcdb3a343cac6b1d7f0daa964f999069663fbd90 Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.754594 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.754699 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-config-data\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.754795 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5rxf\" (UniqueName: \"kubernetes.io/projected/001b7802-4a74-4409-8667-128892193313-kube-api-access-x5rxf\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.754825 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-scripts\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.763265 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-config-data\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.764595 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.772744 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-scripts\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.797766 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.812510 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5rxf\" (UniqueName: \"kubernetes.io/projected/001b7802-4a74-4409-8667-128892193313-kube-api-access-x5rxf\") pod \"nova-cell1-conductor-db-sync-sgvbg\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.827675 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:32:27 crc kubenswrapper[4711]: W1205 12:32:27.843827 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod907e0f25_2faa_4e14_a121_a5b31b285b7c.slice/crio-40fdf991462df1b42c795e7351ba9b0bf27d2271fe3534b4e19651f27117b838 WatchSource:0}: Error finding container 40fdf991462df1b42c795e7351ba9b0bf27d2271fe3534b4e19651f27117b838: Status 404 returned error can't find the container with id 40fdf991462df1b42c795e7351ba9b0bf27d2271fe3534b4e19651f27117b838 Dec 05 12:32:27 crc kubenswrapper[4711]: I1205 12:32:27.950132 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.088546 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c7cddbf6c-zpqgg"] Dec 05 12:32:28 crc kubenswrapper[4711]: W1205 12:32:28.117781 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a092676_35a7_435a_a7d2_06dd27c07809.slice/crio-88ca84b0f908e1c0b31605002550a2df44fda28fcb41e7d2145ee93c65795b02 WatchSource:0}: Error finding container 88ca84b0f908e1c0b31605002550a2df44fda28fcb41e7d2145ee93c65795b02: Status 404 returned error can't find the container with id 88ca84b0f908e1c0b31605002550a2df44fda28fcb41e7d2145ee93c65795b02 Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.309813 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0e85ce21-b60b-492f-94aa-fdbf86ea22b2","Type":"ContainerStarted","Data":"5d354974bd76287b72cf806ef29ccfd895d4c82dc365562fd2b50c3d1709158d"} Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.313136 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" event={"ID":"3a092676-35a7-435a-a7d2-06dd27c07809","Type":"ContainerStarted","Data":"88ca84b0f908e1c0b31605002550a2df44fda28fcb41e7d2145ee93c65795b02"} Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.325540 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-2hnmx" event={"ID":"a5592302-99d4-4e3c-948b-b9bccef58e6f","Type":"ContainerStarted","Data":"8a5a8f0fde0ae46fb1b38fa8e549a85e2558f601d06143ff46f4fddb87e7870e"} Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.325595 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-2hnmx" event={"ID":"a5592302-99d4-4e3c-948b-b9bccef58e6f","Type":"ContainerStarted","Data":"a19faa0dbebf0f7b8be954299bbab7385e6759c5e26b00a9ec3a5b0b89c7c94e"} Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.333433 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"907e0f25-2faa-4e14-a121-a5b31b285b7c","Type":"ContainerStarted","Data":"40fdf991462df1b42c795e7351ba9b0bf27d2271fe3534b4e19651f27117b838"} Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.341195 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"740e9907-2689-4775-bf06-78517da985f7","Type":"ContainerStarted","Data":"96fa96bc44cbc86d605b7c7ebcdb3a343cac6b1d7f0daa964f999069663fbd90"} Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.351163 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-2hnmx" podStartSLOduration=2.351143433 podStartE2EDuration="2.351143433s" podCreationTimestamp="2025-12-05 12:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:32:28.347968465 +0000 UTC m=+1393.932290815" watchObservedRunningTime="2025-12-05 12:32:28.351143433 +0000 UTC m=+1393.935465763" Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.507290 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sgvbg"] Dec 05 12:32:28 crc kubenswrapper[4711]: I1205 12:32:28.709803 4711 scope.go:117] "RemoveContainer" 
containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191" Dec 05 12:32:29 crc kubenswrapper[4711]: I1205 12:32:29.354715 4711 generic.go:334] "Generic (PLEG): container finished" podID="3a092676-35a7-435a-a7d2-06dd27c07809" containerID="dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48" exitCode=0 Dec 05 12:32:29 crc kubenswrapper[4711]: I1205 12:32:29.354886 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" event={"ID":"3a092676-35a7-435a-a7d2-06dd27c07809","Type":"ContainerDied","Data":"dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48"} Dec 05 12:32:29 crc kubenswrapper[4711]: I1205 12:32:29.358207 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" event={"ID":"001b7802-4a74-4409-8667-128892193313","Type":"ContainerStarted","Data":"2bdfb993dfe4a10d9fbe22bbb422bd944d224d3a39975fe62edca57dd4e16fc2"} Dec 05 12:32:29 crc kubenswrapper[4711]: I1205 12:32:29.358270 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" event={"ID":"001b7802-4a74-4409-8667-128892193313","Type":"ContainerStarted","Data":"f3f5a2548b4925dc7b6ed02b9bf3ae73fa9b76258ff32aec8aa433812fe3d314"} Dec 05 12:32:29 crc kubenswrapper[4711]: I1205 12:32:29.362571 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerStarted","Data":"310df0f2ff941e827e368e2de9a5dc404af9dcf7895906b99970cd36b72e2c6a"} Dec 05 12:32:29 crc kubenswrapper[4711]: I1205 12:32:29.397455 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" podStartSLOduration=2.39743075 podStartE2EDuration="2.39743075s" podCreationTimestamp="2025-12-05 12:32:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:32:29.393253977 +0000 UTC m=+1394.977576307" watchObservedRunningTime="2025-12-05 12:32:29.39743075 +0000 UTC m=+1394.981753090" Dec 05 12:32:30 crc kubenswrapper[4711]: I1205 12:32:30.052901 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:32:30 crc kubenswrapper[4711]: I1205 12:32:30.144796 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:32:30 crc kubenswrapper[4711]: I1205 12:32:30.850248 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:32:30 crc kubenswrapper[4711]: I1205 12:32:30.905101 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 05 12:32:31 crc kubenswrapper[4711]: I1205 12:32:31.380784 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:32:31 crc kubenswrapper[4711]: I1205 12:32:31.425031 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 05 12:32:31 crc kubenswrapper[4711]: I1205 12:32:31.480899 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.404103 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" 
event={"ID":"3a092676-35a7-435a-a7d2-06dd27c07809","Type":"ContainerStarted","Data":"ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae"} Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.404802 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.407176 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"907e0f25-2faa-4e14-a121-a5b31b285b7c","Type":"ContainerStarted","Data":"bad9ac51a1a41308edb5c2832c87c028f49e13e6c9c25ff1d355d5e8f71eb77f"} Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.407326 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="907e0f25-2faa-4e14-a121-a5b31b285b7c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://bad9ac51a1a41308edb5c2832c87c028f49e13e6c9c25ff1d355d5e8f71eb77f" gracePeriod=30 Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.410361 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5ac07289-6924-47a2-8e4e-1c2b5d7d915c","Type":"ContainerStarted","Data":"e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2"} Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.410425 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5ac07289-6924-47a2-8e4e-1c2b5d7d915c","Type":"ContainerStarted","Data":"781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9"} Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.413768 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0e85ce21-b60b-492f-94aa-fdbf86ea22b2","Type":"ContainerStarted","Data":"978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493"} Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.417433 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" containerID="cri-o://310df0f2ff941e827e368e2de9a5dc404af9dcf7895906b99970cd36b72e2c6a" gracePeriod=30 Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.417603 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="740e9907-2689-4775-bf06-78517da985f7" containerName="nova-metadata-log" containerID="cri-o://4bd7f868a765e36f431707c4d5a312ff62d3879c7dbb47db534b0ab7e8bce13a" gracePeriod=30 Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.417671 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"740e9907-2689-4775-bf06-78517da985f7","Type":"ContainerStarted","Data":"b98eded8dfe31315b1fd5c070239621819f6e4214d67a119146b37cca7ff0447"} Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.417698 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"740e9907-2689-4775-bf06-78517da985f7","Type":"ContainerStarted","Data":"4bd7f868a765e36f431707c4d5a312ff62d3879c7dbb47db534b0ab7e8bce13a"} Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.417977 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="740e9907-2689-4775-bf06-78517da985f7" containerName="nova-metadata-metadata" 
containerID="cri-o://b98eded8dfe31315b1fd5c070239621819f6e4214d67a119146b37cca7ff0447" gracePeriod=30 Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.441699 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" podStartSLOduration=7.441675994 podStartE2EDuration="7.441675994s" podCreationTimestamp="2025-12-05 12:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:32:33.42689148 +0000 UTC m=+1399.011213820" watchObservedRunningTime="2025-12-05 12:32:33.441675994 +0000 UTC m=+1399.025998324" Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.452093 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.651370054 podStartE2EDuration="7.452068879s" podCreationTimestamp="2025-12-05 12:32:26 +0000 UTC" firstStartedPulling="2025-12-05 12:32:27.834924779 +0000 UTC m=+1393.419247109" lastFinishedPulling="2025-12-05 12:32:32.635623604 +0000 UTC m=+1398.219945934" observedRunningTime="2025-12-05 12:32:33.449302161 +0000 UTC m=+1399.033624511" watchObservedRunningTime="2025-12-05 12:32:33.452068879 +0000 UTC m=+1399.036391219" Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.481744 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.69386858 podStartE2EDuration="7.481720228s" podCreationTimestamp="2025-12-05 12:32:26 +0000 UTC" firstStartedPulling="2025-12-05 12:32:27.847859298 +0000 UTC m=+1393.432181628" lastFinishedPulling="2025-12-05 12:32:32.635710936 +0000 UTC m=+1398.220033276" observedRunningTime="2025-12-05 12:32:33.466992087 +0000 UTC m=+1399.051314427" watchObservedRunningTime="2025-12-05 12:32:33.481720228 +0000 UTC m=+1399.066042558" Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.485119 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.568046096 podStartE2EDuration="7.485105002s" podCreationTimestamp="2025-12-05 12:32:26 +0000 UTC" firstStartedPulling="2025-12-05 12:32:27.726132714 +0000 UTC m=+1393.310455044" lastFinishedPulling="2025-12-05 12:32:32.64319162 +0000 UTC m=+1398.227513950" observedRunningTime="2025-12-05 12:32:33.482832466 +0000 UTC m=+1399.067154806" watchObservedRunningTime="2025-12-05 12:32:33.485105002 +0000 UTC m=+1399.069427332" Dec 05 12:32:33 crc kubenswrapper[4711]: I1205 12:32:33.511625 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.206947957 podStartE2EDuration="7.511600084s" podCreationTimestamp="2025-12-05 12:32:26 +0000 UTC" firstStartedPulling="2025-12-05 12:32:27.3318946 +0000 UTC m=+1392.916216940" lastFinishedPulling="2025-12-05 12:32:32.636546727 +0000 UTC m=+1398.220869067" observedRunningTime="2025-12-05 12:32:33.506068248 +0000 UTC m=+1399.090390588" watchObservedRunningTime="2025-12-05 12:32:33.511600084 +0000 UTC m=+1399.095922414" Dec 05 12:32:34 crc kubenswrapper[4711]: I1205 12:32:34.429751 4711 generic.go:334] "Generic (PLEG): container finished" podID="740e9907-2689-4775-bf06-78517da985f7" containerID="4bd7f868a765e36f431707c4d5a312ff62d3879c7dbb47db534b0ab7e8bce13a" exitCode=143 Dec 05 12:32:34 crc kubenswrapper[4711]: I1205 12:32:34.429858 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"740e9907-2689-4775-bf06-78517da985f7","Type":"ContainerDied","Data":"4bd7f868a765e36f431707c4d5a312ff62d3879c7dbb47db534b0ab7e8bce13a"} Dec 05 12:32:36 crc kubenswrapper[4711]: I1205 12:32:36.556248 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:32:36 crc kubenswrapper[4711]: I1205 12:32:36.556521 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:32:36 crc kubenswrapper[4711]: I1205 12:32:36.695591 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:32:36 crc kubenswrapper[4711]: I1205 12:32:36.695626 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:32:36 crc kubenswrapper[4711]: I1205 12:32:36.880299 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 12:32:36 crc kubenswrapper[4711]: I1205 12:32:36.880349 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 12:32:36 crc kubenswrapper[4711]: I1205 12:32:36.913033 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 12:32:36 crc kubenswrapper[4711]: I1205 12:32:36.969586 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:32:37 crc kubenswrapper[4711]: I1205 12:32:37.472566 4711 generic.go:334] "Generic (PLEG): container finished" podID="a5592302-99d4-4e3c-948b-b9bccef58e6f" containerID="8a5a8f0fde0ae46fb1b38fa8e549a85e2558f601d06143ff46f4fddb87e7870e" exitCode=0 Dec 05 12:32:37 crc kubenswrapper[4711]: I1205 12:32:37.472647 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-2hnmx" event={"ID":"a5592302-99d4-4e3c-948b-b9bccef58e6f","Type":"ContainerDied","Data":"8a5a8f0fde0ae46fb1b38fa8e549a85e2558f601d06143ff46f4fddb87e7870e"} Dec 05 12:32:37 crc kubenswrapper[4711]: I1205 12:32:37.502548 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 12:32:37 crc kubenswrapper[4711]: I1205 12:32:37.638642 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:32:37 crc kubenswrapper[4711]: I1205 12:32:37.638990 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.862209 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.954914 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-config-data\") pod \"a5592302-99d4-4e3c-948b-b9bccef58e6f\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.955211 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-combined-ca-bundle\") pod \"a5592302-99d4-4e3c-948b-b9bccef58e6f\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.955346 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9njj7\" (UniqueName: \"kubernetes.io/projected/a5592302-99d4-4e3c-948b-b9bccef58e6f-kube-api-access-9njj7\") pod \"a5592302-99d4-4e3c-948b-b9bccef58e6f\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.955445 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-scripts\") pod \"a5592302-99d4-4e3c-948b-b9bccef58e6f\" (UID: \"a5592302-99d4-4e3c-948b-b9bccef58e6f\") " Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.967655 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-scripts" (OuterVolumeSpecName: "scripts") pod "a5592302-99d4-4e3c-948b-b9bccef58e6f" (UID: "a5592302-99d4-4e3c-948b-b9bccef58e6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.967756 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5592302-99d4-4e3c-948b-b9bccef58e6f-kube-api-access-9njj7" (OuterVolumeSpecName: "kube-api-access-9njj7") pod "a5592302-99d4-4e3c-948b-b9bccef58e6f" (UID: "a5592302-99d4-4e3c-948b-b9bccef58e6f"). InnerVolumeSpecName "kube-api-access-9njj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.985982 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5592302-99d4-4e3c-948b-b9bccef58e6f" (UID: "a5592302-99d4-4e3c-948b-b9bccef58e6f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:38 crc kubenswrapper[4711]: I1205 12:32:38.991216 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-config-data" (OuterVolumeSpecName: "config-data") pod "a5592302-99d4-4e3c-948b-b9bccef58e6f" (UID: "a5592302-99d4-4e3c-948b-b9bccef58e6f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.057499 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9njj7\" (UniqueName: \"kubernetes.io/projected/a5592302-99d4-4e3c-948b-b9bccef58e6f-kube-api-access-9njj7\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.057537 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.057551 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.057563 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5592302-99d4-4e3c-948b-b9bccef58e6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.494001 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-2hnmx" event={"ID":"a5592302-99d4-4e3c-948b-b9bccef58e6f","Type":"ContainerDied","Data":"a19faa0dbebf0f7b8be954299bbab7385e6759c5e26b00a9ec3a5b0b89c7c94e"} Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.494051 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a19faa0dbebf0f7b8be954299bbab7385e6759c5e26b00a9ec3a5b0b89c7c94e" Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.494114 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-2hnmx" Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.674538 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.674996 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-log" containerID="cri-o://781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9" gracePeriod=30 Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.675069 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-api" containerID="cri-o://e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2" gracePeriod=30 Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.690065 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:39 crc kubenswrapper[4711]: I1205 12:32:39.690239 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0e85ce21-b60b-492f-94aa-fdbf86ea22b2" containerName="nova-scheduler-scheduler" containerID="cri-o://978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493" gracePeriod=30 Dec 05 12:32:40 crc kubenswrapper[4711]: I1205 12:32:40.512471 4711 generic.go:334] "Generic (PLEG): container finished" podID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerID="781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9" exitCode=143 Dec 05 12:32:40 crc kubenswrapper[4711]: I1205 12:32:40.512629 4711 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/nova-api-0" event={"ID":"5ac07289-6924-47a2-8e4e-1c2b5d7d915c","Type":"ContainerDied","Data":"781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9"} Dec 05 12:32:40 crc kubenswrapper[4711]: I1205 12:32:40.963807 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.097309 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-combined-ca-bundle\") pod \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.097380 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-logs\") pod \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.097458 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-config-data\") pod \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.097682 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pqnx\" (UniqueName: \"kubernetes.io/projected/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-kube-api-access-9pqnx\") pod \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\" (UID: \"5ac07289-6924-47a2-8e4e-1c2b5d7d915c\") " Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.097942 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-logs" (OuterVolumeSpecName: "logs") pod "5ac07289-6924-47a2-8e4e-1c2b5d7d915c" (UID: "5ac07289-6924-47a2-8e4e-1c2b5d7d915c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.098287 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.108082 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-kube-api-access-9pqnx" (OuterVolumeSpecName: "kube-api-access-9pqnx") pod "5ac07289-6924-47a2-8e4e-1c2b5d7d915c" (UID: "5ac07289-6924-47a2-8e4e-1c2b5d7d915c"). InnerVolumeSpecName "kube-api-access-9pqnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.142494 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ac07289-6924-47a2-8e4e-1c2b5d7d915c" (UID: "5ac07289-6924-47a2-8e4e-1c2b5d7d915c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.142988 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-config-data" (OuterVolumeSpecName: "config-data") pod "5ac07289-6924-47a2-8e4e-1c2b5d7d915c" (UID: "5ac07289-6924-47a2-8e4e-1c2b5d7d915c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.200052 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pqnx\" (UniqueName: \"kubernetes.io/projected/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-kube-api-access-9pqnx\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.200091 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.200106 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac07289-6924-47a2-8e4e-1c2b5d7d915c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.499434 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.547009 4711 generic.go:334] "Generic (PLEG): container finished" podID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerID="e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2" exitCode=0 Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.547157 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.547854 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5ac07289-6924-47a2-8e4e-1c2b5d7d915c","Type":"ContainerDied","Data":"e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2"} Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.547890 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5ac07289-6924-47a2-8e4e-1c2b5d7d915c","Type":"ContainerDied","Data":"a86100d2f4eafbbb83ad2bfc2675b6ffee044ec4b72daba742a0a3b0559377d2"} Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.547909 4711 scope.go:117] "RemoveContainer" containerID="e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.550516 4711 generic.go:334] "Generic (PLEG): container finished" podID="0e85ce21-b60b-492f-94aa-fdbf86ea22b2" containerID="978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493" exitCode=0 Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.550551 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0e85ce21-b60b-492f-94aa-fdbf86ea22b2","Type":"ContainerDied","Data":"978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493"} Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.550578 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0e85ce21-b60b-492f-94aa-fdbf86ea22b2","Type":"ContainerDied","Data":"5d354974bd76287b72cf806ef29ccfd895d4c82dc365562fd2b50c3d1709158d"} Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.550630 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.592943 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.609104 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxh9b\" (UniqueName: \"kubernetes.io/projected/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-kube-api-access-xxh9b\") pod \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.609278 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-combined-ca-bundle\") pod \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.609428 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-config-data\") pod \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\" (UID: \"0e85ce21-b60b-492f-94aa-fdbf86ea22b2\") " Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.613173 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.616750 4711 scope.go:117] "RemoveContainer" containerID="781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.641826 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-kube-api-access-xxh9b" (OuterVolumeSpecName: "kube-api-access-xxh9b") pod "0e85ce21-b60b-492f-94aa-fdbf86ea22b2" (UID: "0e85ce21-b60b-492f-94aa-fdbf86ea22b2"). InnerVolumeSpecName "kube-api-access-xxh9b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.667810 4711 scope.go:117] "RemoveContainer" containerID="e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2" Dec 05 12:32:41 crc kubenswrapper[4711]: E1205 12:32:41.668951 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2\": container with ID starting with e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2 not found: ID does not exist" containerID="e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.668987 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2"} err="failed to get container status \"e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2\": rpc error: code = NotFound desc = could not find container \"e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2\": container with ID starting with e9327c4478fb5c90023d541ac19cd67f302c12d8165db80eb16a112111025ca2 not found: ID does not exist" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.669045 4711 scope.go:117] "RemoveContainer" containerID="781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9" Dec 05 12:32:41 crc kubenswrapper[4711]: E1205 12:32:41.669349 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9\": container with ID starting with 781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9 not found: ID does not exist" containerID="781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.669424 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9"} err="failed to get container status \"781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9\": rpc error: code = NotFound desc = could not find container \"781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9\": container with ID starting with 781e3f60c94a8a5568a8b80924e43251049c565f49c6c2a49721dda5b1361da9 not found: ID does not exist" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.669437 4711 scope.go:117] "RemoveContainer" containerID="978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.679982 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:41 crc kubenswrapper[4711]: E1205 12:32:41.680676 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-log" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.680724 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-log" Dec 05 12:32:41 crc kubenswrapper[4711]: E1205 12:32:41.680751 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-api" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.680760 4711 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-api" Dec 05 12:32:41 crc kubenswrapper[4711]: E1205 12:32:41.680813 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5592302-99d4-4e3c-948b-b9bccef58e6f" containerName="nova-manage" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.680822 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5592302-99d4-4e3c-948b-b9bccef58e6f" containerName="nova-manage" Dec 05 12:32:41 crc kubenswrapper[4711]: E1205 12:32:41.680838 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e85ce21-b60b-492f-94aa-fdbf86ea22b2" containerName="nova-scheduler-scheduler" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.680845 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e85ce21-b60b-492f-94aa-fdbf86ea22b2" containerName="nova-scheduler-scheduler" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.681253 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-api" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.681321 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5592302-99d4-4e3c-948b-b9bccef58e6f" containerName="nova-manage" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.681350 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" containerName="nova-api-log" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.681421 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e85ce21-b60b-492f-94aa-fdbf86ea22b2" containerName="nova-scheduler-scheduler" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.689168 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.690483 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.694272 4711 scope.go:117] "RemoveContainer" containerID="978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.694569 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 12:32:41 crc kubenswrapper[4711]: E1205 12:32:41.694773 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493\": container with ID starting with 978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493 not found: ID does not exist" containerID="978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.694808 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493"} err="failed to get container status \"978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493\": rpc error: code = NotFound desc = could not find container \"978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493\": container with ID starting with 978ec3237aac142b788ada5d0f973321d4fe237a50c87aec389b5c56ccd8a493 not found: ID does not exist" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.700575 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-config-data" (OuterVolumeSpecName: "config-data") pod "0e85ce21-b60b-492f-94aa-fdbf86ea22b2" (UID: "0e85ce21-b60b-492f-94aa-fdbf86ea22b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.704761 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e85ce21-b60b-492f-94aa-fdbf86ea22b2" (UID: "0e85ce21-b60b-492f-94aa-fdbf86ea22b2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.715036 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.715073 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.715086 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxh9b\" (UniqueName: \"kubernetes.io/projected/0e85ce21-b60b-492f-94aa-fdbf86ea22b2-kube-api-access-xxh9b\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.817249 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7cbaee1-6b39-409e-b338-660c4a6ba023-logs\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.817308 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.817445 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-config-data\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.817496 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qf58\" (UniqueName: \"kubernetes.io/projected/b7cbaee1-6b39-409e-b338-660c4a6ba023-kube-api-access-8qf58\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.919053 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-config-data\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.919241 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qf58\" (UniqueName: \"kubernetes.io/projected/b7cbaee1-6b39-409e-b338-660c4a6ba023-kube-api-access-8qf58\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.919404 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7cbaee1-6b39-409e-b338-660c4a6ba023-logs\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.919517 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.919807 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7cbaee1-6b39-409e-b338-660c4a6ba023-logs\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.923274 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.923973 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-config-data\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.940095 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.944244 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qf58\" (UniqueName: \"kubernetes.io/projected/b7cbaee1-6b39-409e-b338-660c4a6ba023-kube-api-access-8qf58\") pod \"nova-api-0\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " pod="openstack/nova-api-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.954397 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.964471 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.965947 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.974059 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:41 crc kubenswrapper[4711]: I1205 12:32:41.977118 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.008827 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.123177 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlsqz\" (UniqueName: \"kubernetes.io/projected/c77ea68b-9a3e-4579-84d5-f13aab92e17c-kube-api-access-mlsqz\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.123684 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.124127 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-config-data\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.225700 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlsqz\" (UniqueName: \"kubernetes.io/projected/c77ea68b-9a3e-4579-84d5-f13aab92e17c-kube-api-access-mlsqz\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.225809 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.225905 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-config-data\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.230700 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-config-data\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.230754 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.232586 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.244417 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlsqz\" (UniqueName: \"kubernetes.io/projected/c77ea68b-9a3e-4579-84d5-f13aab92e17c-kube-api-access-mlsqz\") pod \"nova-scheduler-0\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " 
pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.301665 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b748ccb4c-d9fwx"] Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.301970 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" podUID="49030595-188e-44f8-9602-a8397952c540" containerName="dnsmasq-dns" containerID="cri-o://f59e27b825d461dcededbbe1849736de5618d1c2e13f6a2ddd822fe459259077" gracePeriod=10 Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.419694 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:32:42 crc kubenswrapper[4711]: E1205 12:32:42.437245 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49030595_188e_44f8_9602_a8397952c540.slice/crio-f59e27b825d461dcededbbe1849736de5618d1c2e13f6a2ddd822fe459259077.scope\": RecentStats: unable to find data in memory cache]" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.476890 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:32:42 crc kubenswrapper[4711]: W1205 12:32:42.499929 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7cbaee1_6b39_409e_b338_660c4a6ba023.slice/crio-72d6b416e607b8ccb44b47dc2e37602da8a6b36611697d178b33d341330adcf1 WatchSource:0}: Error finding container 72d6b416e607b8ccb44b47dc2e37602da8a6b36611697d178b33d341330adcf1: Status 404 returned error can't find the container with id 72d6b416e607b8ccb44b47dc2e37602da8a6b36611697d178b33d341330adcf1 Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.575705 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b7cbaee1-6b39-409e-b338-660c4a6ba023","Type":"ContainerStarted","Data":"72d6b416e607b8ccb44b47dc2e37602da8a6b36611697d178b33d341330adcf1"} Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.578747 4711 generic.go:334] "Generic (PLEG): container finished" podID="49030595-188e-44f8-9602-a8397952c540" containerID="f59e27b825d461dcededbbe1849736de5618d1c2e13f6a2ddd822fe459259077" exitCode=0 Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.578797 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" event={"ID":"49030595-188e-44f8-9602-a8397952c540","Type":"ContainerDied","Data":"f59e27b825d461dcededbbe1849736de5618d1c2e13f6a2ddd822fe459259077"} Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.698681 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e85ce21-b60b-492f-94aa-fdbf86ea22b2" path="/var/lib/kubelet/pods/0e85ce21-b60b-492f-94aa-fdbf86ea22b2/volumes" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.699496 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ac07289-6924-47a2-8e4e-1c2b5d7d915c" path="/var/lib/kubelet/pods/5ac07289-6924-47a2-8e4e-1c2b5d7d915c/volumes" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.813178 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.943921 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-swift-storage-0\") pod \"49030595-188e-44f8-9602-a8397952c540\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.943998 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4wtv\" (UniqueName: \"kubernetes.io/projected/49030595-188e-44f8-9602-a8397952c540-kube-api-access-t4wtv\") pod \"49030595-188e-44f8-9602-a8397952c540\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.944036 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-svc\") pod \"49030595-188e-44f8-9602-a8397952c540\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.944075 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-config\") pod \"49030595-188e-44f8-9602-a8397952c540\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.944124 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-sb\") pod \"49030595-188e-44f8-9602-a8397952c540\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.944156 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-nb\") pod \"49030595-188e-44f8-9602-a8397952c540\" (UID: \"49030595-188e-44f8-9602-a8397952c540\") " Dec 05 12:32:42 crc kubenswrapper[4711]: I1205 12:32:42.955029 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49030595-188e-44f8-9602-a8397952c540-kube-api-access-t4wtv" (OuterVolumeSpecName: "kube-api-access-t4wtv") pod "49030595-188e-44f8-9602-a8397952c540" (UID: "49030595-188e-44f8-9602-a8397952c540"). InnerVolumeSpecName "kube-api-access-t4wtv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.007037 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:32:43 crc kubenswrapper[4711]: W1205 12:32:43.022593 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc77ea68b_9a3e_4579_84d5_f13aab92e17c.slice/crio-c8e8676c6ab84dd27115ebd9408546a27af7c67ed1eb066fd43a6b0eee2fa21b WatchSource:0}: Error finding container c8e8676c6ab84dd27115ebd9408546a27af7c67ed1eb066fd43a6b0eee2fa21b: Status 404 returned error can't find the container with id c8e8676c6ab84dd27115ebd9408546a27af7c67ed1eb066fd43a6b0eee2fa21b Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.047328 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4wtv\" (UniqueName: \"kubernetes.io/projected/49030595-188e-44f8-9602-a8397952c540-kube-api-access-t4wtv\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.049295 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "49030595-188e-44f8-9602-a8397952c540" (UID: "49030595-188e-44f8-9602-a8397952c540"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.050093 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "49030595-188e-44f8-9602-a8397952c540" (UID: "49030595-188e-44f8-9602-a8397952c540"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.053843 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "49030595-188e-44f8-9602-a8397952c540" (UID: "49030595-188e-44f8-9602-a8397952c540"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.067575 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-config" (OuterVolumeSpecName: "config") pod "49030595-188e-44f8-9602-a8397952c540" (UID: "49030595-188e-44f8-9602-a8397952c540"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.067662 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "49030595-188e-44f8-9602-a8397952c540" (UID: "49030595-188e-44f8-9602-a8397952c540"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.149177 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.149219 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.149263 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.149280 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.149292 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49030595-188e-44f8-9602-a8397952c540-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.594321 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c77ea68b-9a3e-4579-84d5-f13aab92e17c","Type":"ContainerStarted","Data":"b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73"} Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.594373 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c77ea68b-9a3e-4579-84d5-f13aab92e17c","Type":"ContainerStarted","Data":"c8e8676c6ab84dd27115ebd9408546a27af7c67ed1eb066fd43a6b0eee2fa21b"} Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.596700 4711 generic.go:334] "Generic (PLEG): container finished" podID="001b7802-4a74-4409-8667-128892193313" containerID="2bdfb993dfe4a10d9fbe22bbb422bd944d224d3a39975fe62edca57dd4e16fc2" exitCode=0 Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.596775 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" event={"ID":"001b7802-4a74-4409-8667-128892193313","Type":"ContainerDied","Data":"2bdfb993dfe4a10d9fbe22bbb422bd944d224d3a39975fe62edca57dd4e16fc2"} Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.599972 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b7cbaee1-6b39-409e-b338-660c4a6ba023","Type":"ContainerStarted","Data":"fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527"} Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.599999 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b7cbaee1-6b39-409e-b338-660c4a6ba023","Type":"ContainerStarted","Data":"a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713"} Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.603612 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" event={"ID":"49030595-188e-44f8-9602-a8397952c540","Type":"ContainerDied","Data":"99249e91910199593d8870ded3a7c0f1117c33a7e6760bbe38bfc95b6d403ac5"} Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.603650 4711 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b748ccb4c-d9fwx" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.603670 4711 scope.go:117] "RemoveContainer" containerID="f59e27b825d461dcededbbe1849736de5618d1c2e13f6a2ddd822fe459259077" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.630095 4711 scope.go:117] "RemoveContainer" containerID="f40d4b3874250828a227c9e9bdcdbd2e21d9d581da2e75ca24b5fdfea94fe514" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.635156 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.6351382819999998 podStartE2EDuration="2.635138282s" podCreationTimestamp="2025-12-05 12:32:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:32:43.613286885 +0000 UTC m=+1409.197609255" watchObservedRunningTime="2025-12-05 12:32:43.635138282 +0000 UTC m=+1409.219460612" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.697291 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.6972542 podStartE2EDuration="2.6972542s" podCreationTimestamp="2025-12-05 12:32:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:32:43.675380002 +0000 UTC m=+1409.259702332" watchObservedRunningTime="2025-12-05 12:32:43.6972542 +0000 UTC m=+1409.281576530" Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.724479 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b748ccb4c-d9fwx"] Dec 05 12:32:43 crc kubenswrapper[4711]: I1205 12:32:43.737987 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b748ccb4c-d9fwx"] Dec 05 12:32:44 crc kubenswrapper[4711]: I1205 12:32:44.696354 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49030595-188e-44f8-9602-a8397952c540" path="/var/lib/kubelet/pods/49030595-188e-44f8-9602-a8397952c540/volumes" Dec 05 12:32:44 crc kubenswrapper[4711]: I1205 12:32:44.991842 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.090027 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-config-data\") pod \"001b7802-4a74-4409-8667-128892193313\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.090112 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-scripts\") pod \"001b7802-4a74-4409-8667-128892193313\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.090175 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-combined-ca-bundle\") pod \"001b7802-4a74-4409-8667-128892193313\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.090441 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5rxf\" (UniqueName: \"kubernetes.io/projected/001b7802-4a74-4409-8667-128892193313-kube-api-access-x5rxf\") pod \"001b7802-4a74-4409-8667-128892193313\" (UID: \"001b7802-4a74-4409-8667-128892193313\") " Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.095972 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/001b7802-4a74-4409-8667-128892193313-kube-api-access-x5rxf" (OuterVolumeSpecName: "kube-api-access-x5rxf") pod "001b7802-4a74-4409-8667-128892193313" (UID: "001b7802-4a74-4409-8667-128892193313"). InnerVolumeSpecName "kube-api-access-x5rxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.109671 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-scripts" (OuterVolumeSpecName: "scripts") pod "001b7802-4a74-4409-8667-128892193313" (UID: "001b7802-4a74-4409-8667-128892193313"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.135552 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-config-data" (OuterVolumeSpecName: "config-data") pod "001b7802-4a74-4409-8667-128892193313" (UID: "001b7802-4a74-4409-8667-128892193313"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.136132 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "001b7802-4a74-4409-8667-128892193313" (UID: "001b7802-4a74-4409-8667-128892193313"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.193203 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.193248 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.193261 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5rxf\" (UniqueName: \"kubernetes.io/projected/001b7802-4a74-4409-8667-128892193313-kube-api-access-x5rxf\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.193276 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/001b7802-4a74-4409-8667-128892193313-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.633868 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" event={"ID":"001b7802-4a74-4409-8667-128892193313","Type":"ContainerDied","Data":"f3f5a2548b4925dc7b6ed02b9bf3ae73fa9b76258ff32aec8aa433812fe3d314"} Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.633914 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3f5a2548b4925dc7b6ed02b9bf3ae73fa9b76258ff32aec8aa433812fe3d314" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.633922 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-sgvbg" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.703445 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 12:32:45 crc kubenswrapper[4711]: E1205 12:32:45.703838 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49030595-188e-44f8-9602-a8397952c540" containerName="dnsmasq-dns" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.703853 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="49030595-188e-44f8-9602-a8397952c540" containerName="dnsmasq-dns" Dec 05 12:32:45 crc kubenswrapper[4711]: E1205 12:32:45.703867 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="001b7802-4a74-4409-8667-128892193313" containerName="nova-cell1-conductor-db-sync" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.703878 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="001b7802-4a74-4409-8667-128892193313" containerName="nova-cell1-conductor-db-sync" Dec 05 12:32:45 crc kubenswrapper[4711]: E1205 12:32:45.703928 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49030595-188e-44f8-9602-a8397952c540" containerName="init" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.703934 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="49030595-188e-44f8-9602-a8397952c540" containerName="init" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.704121 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="49030595-188e-44f8-9602-a8397952c540" containerName="dnsmasq-dns" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.704147 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="001b7802-4a74-4409-8667-128892193313" containerName="nova-cell1-conductor-db-sync" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.704833 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.709622 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.729374 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.804975 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdjvg\" (UniqueName: \"kubernetes.io/projected/739542f3-a693-4b21-9f9c-dba893c7b3f0-kube-api-access-vdjvg\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.805033 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/739542f3-a693-4b21-9f9c-dba893c7b3f0-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.805070 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/739542f3-a693-4b21-9f9c-dba893c7b3f0-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.907461 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdjvg\" (UniqueName: \"kubernetes.io/projected/739542f3-a693-4b21-9f9c-dba893c7b3f0-kube-api-access-vdjvg\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.907514 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/739542f3-a693-4b21-9f9c-dba893c7b3f0-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.907537 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/739542f3-a693-4b21-9f9c-dba893c7b3f0-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.912336 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/739542f3-a693-4b21-9f9c-dba893c7b3f0-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.913163 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/739542f3-a693-4b21-9f9c-dba893c7b3f0-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:45 crc kubenswrapper[4711]: I1205 12:32:45.937064 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdjvg\" (UniqueName: \"kubernetes.io/projected/739542f3-a693-4b21-9f9c-dba893c7b3f0-kube-api-access-vdjvg\") pod \"nova-cell1-conductor-0\" (UID: \"739542f3-a693-4b21-9f9c-dba893c7b3f0\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:46 crc kubenswrapper[4711]: I1205 12:32:46.025088 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:46 crc kubenswrapper[4711]: I1205 12:32:46.482151 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 12:32:46 crc kubenswrapper[4711]: I1205 12:32:46.646107 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"739542f3-a693-4b21-9f9c-dba893c7b3f0","Type":"ContainerStarted","Data":"c2c29e21525d4b893a159a030aa9fd15332616cc4f1fbf1eff80f0a119b64e62"} Dec 05 12:32:46 crc kubenswrapper[4711]: I1205 12:32:46.646176 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"739542f3-a693-4b21-9f9c-dba893c7b3f0","Type":"ContainerStarted","Data":"24137919975c19e3bbde817642ac7b59f7079cfbf0d9ef51ddaf748eb155798e"} Dec 05 12:32:46 crc kubenswrapper[4711]: I1205 12:32:46.646279 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:46 crc kubenswrapper[4711]: I1205 12:32:46.671378 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.67134614 podStartE2EDuration="1.67134614s" podCreationTimestamp="2025-12-05 12:32:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:32:46.66081052 +0000 UTC m=+1412.245132850" watchObservedRunningTime="2025-12-05 12:32:46.67134614 +0000 UTC m=+1412.255668460" Dec 05 12:32:47 crc kubenswrapper[4711]: I1205 12:32:47.420703 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 12:32:50 crc kubenswrapper[4711]: E1205 12:32:50.852202 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="310df0f2ff941e827e368e2de9a5dc404af9dcf7895906b99970cd36b72e2c6a" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 05 12:32:50 crc kubenswrapper[4711]: E1205 12:32:50.854217 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="310df0f2ff941e827e368e2de9a5dc404af9dcf7895906b99970cd36b72e2c6a" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 05 12:32:50 crc kubenswrapper[4711]: E1205 12:32:50.855606 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="310df0f2ff941e827e368e2de9a5dc404af9dcf7895906b99970cd36b72e2c6a" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 05 12:32:50 crc kubenswrapper[4711]: E1205 12:32:50.855658 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/watcher-decision-engine-0" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:32:51 crc kubenswrapper[4711]: I1205 12:32:51.052657 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 12:32:52 crc kubenswrapper[4711]: I1205 12:32:52.009984 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:32:52 crc kubenswrapper[4711]: I1205 12:32:52.010062 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:32:52 crc kubenswrapper[4711]: I1205 12:32:52.421075 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 12:32:52 crc kubenswrapper[4711]: I1205 12:32:52.451342 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 12:32:52 crc kubenswrapper[4711]: I1205 12:32:52.609099 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 12:32:52 crc kubenswrapper[4711]: I1205 12:32:52.761926 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 12:32:53 crc kubenswrapper[4711]: I1205 12:32:53.092706 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.209:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:32:53 crc kubenswrapper[4711]: I1205 12:32:53.092712 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.209:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:32:56 crc kubenswrapper[4711]: I1205 12:32:56.377750 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:32:56 crc kubenswrapper[4711]: I1205 12:32:56.378254 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="5959fa07-c68d-41dc-ba4a-e68360ec28d2" containerName="kube-state-metrics" containerID="cri-o://badd9af53c97661086d8fd448889c0a4f1bb2ac5889f4182c69f6365348282c3" gracePeriod=30 Dec 05 12:32:56 crc kubenswrapper[4711]: I1205 12:32:56.780522 4711 generic.go:334] "Generic (PLEG): container finished" podID="5959fa07-c68d-41dc-ba4a-e68360ec28d2" containerID="badd9af53c97661086d8fd448889c0a4f1bb2ac5889f4182c69f6365348282c3" exitCode=2 Dec 05 12:32:56 crc kubenswrapper[4711]: I1205 12:32:56.780654 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5959fa07-c68d-41dc-ba4a-e68360ec28d2","Type":"ContainerDied","Data":"badd9af53c97661086d8fd448889c0a4f1bb2ac5889f4182c69f6365348282c3"} Dec 05 12:32:56 crc 
kubenswrapper[4711]: I1205 12:32:56.917231 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.045209 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25g4q\" (UniqueName: \"kubernetes.io/projected/5959fa07-c68d-41dc-ba4a-e68360ec28d2-kube-api-access-25g4q\") pod \"5959fa07-c68d-41dc-ba4a-e68360ec28d2\" (UID: \"5959fa07-c68d-41dc-ba4a-e68360ec28d2\") " Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.050869 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5959fa07-c68d-41dc-ba4a-e68360ec28d2-kube-api-access-25g4q" (OuterVolumeSpecName: "kube-api-access-25g4q") pod "5959fa07-c68d-41dc-ba4a-e68360ec28d2" (UID: "5959fa07-c68d-41dc-ba4a-e68360ec28d2"). InnerVolumeSpecName "kube-api-access-25g4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.147637 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25g4q\" (UniqueName: \"kubernetes.io/projected/5959fa07-c68d-41dc-ba4a-e68360ec28d2-kube-api-access-25g4q\") on node \"crc\" DevicePath \"\"" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.791521 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5959fa07-c68d-41dc-ba4a-e68360ec28d2","Type":"ContainerDied","Data":"c38f6105aa79104bff0016fc2ecbb7268bd73bd64256c8d8df946c2d1b5588b1"} Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.791840 4711 scope.go:117] "RemoveContainer" containerID="badd9af53c97661086d8fd448889c0a4f1bb2ac5889f4182c69f6365348282c3" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.791695 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.841433 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.864934 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.879116 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:32:57 crc kubenswrapper[4711]: E1205 12:32:57.879911 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5959fa07-c68d-41dc-ba4a-e68360ec28d2" containerName="kube-state-metrics" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.879943 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5959fa07-c68d-41dc-ba4a-e68360ec28d2" containerName="kube-state-metrics" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.880224 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5959fa07-c68d-41dc-ba4a-e68360ec28d2" containerName="kube-state-metrics" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.881315 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.883632 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.883827 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.889644 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.963764 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.964415 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.964571 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnz59\" (UniqueName: \"kubernetes.io/projected/6f039111-5426-4f30-8101-1629afd4c8dc-kube-api-access-qnz59\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:57 crc kubenswrapper[4711]: I1205 12:32:57.964870 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.066104 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.066220 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.067152 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.067212 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnz59\" 
(UniqueName: \"kubernetes.io/projected/6f039111-5426-4f30-8101-1629afd4c8dc-kube-api-access-qnz59\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.072701 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.073154 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.076200 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6f039111-5426-4f30-8101-1629afd4c8dc-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.090650 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnz59\" (UniqueName: \"kubernetes.io/projected/6f039111-5426-4f30-8101-1629afd4c8dc-kube-api-access-qnz59\") pod \"kube-state-metrics-0\" (UID: \"6f039111-5426-4f30-8101-1629afd4c8dc\") " pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.205154 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.447184 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.447535 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="ceilometer-central-agent" containerID="cri-o://679e00bf4c5ad46d64af8c6314438a0a404db3fa1b8428a940ce07b7590606f2" gracePeriod=30 Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.447731 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="proxy-httpd" containerID="cri-o://d17cff6afafd29fcff06414470bf216f09404c94134124f5ded90fe02df6b87a" gracePeriod=30 Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.447842 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="ceilometer-notification-agent" containerID="cri-o://08c8ecfe5d4ea1215468a034aca450c8773d3fefb21f54a43c21282f84436c65" gracePeriod=30 Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.447940 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="sg-core" containerID="cri-o://b55b9b38d2838b2d27e9280f494ec8d102c5988a8c4904e67f527c4fb20805fd" gracePeriod=30 Dec 05 12:32:58 crc kubenswrapper[4711]: W1205 12:32:58.707248 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f039111_5426_4f30_8101_1629afd4c8dc.slice/crio-ea9b5f282847c2aa753222dd9e6ad1c35efe28772d8e9b2f6f6215d0fc2fe9f5 WatchSource:0}: Error finding container ea9b5f282847c2aa753222dd9e6ad1c35efe28772d8e9b2f6f6215d0fc2fe9f5: Status 404 returned error can't find the container with id ea9b5f282847c2aa753222dd9e6ad1c35efe28772d8e9b2f6f6215d0fc2fe9f5 Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.886152 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5959fa07-c68d-41dc-ba4a-e68360ec28d2" path="/var/lib/kubelet/pods/5959fa07-c68d-41dc-ba4a-e68360ec28d2/volumes" Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.887360 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:32:58 crc kubenswrapper[4711]: I1205 12:32:58.887447 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"6f039111-5426-4f30-8101-1629afd4c8dc","Type":"ContainerStarted","Data":"ea9b5f282847c2aa753222dd9e6ad1c35efe28772d8e9b2f6f6215d0fc2fe9f5"} Dec 05 12:32:59 crc kubenswrapper[4711]: I1205 12:32:59.811892 4711 generic.go:334] "Generic (PLEG): container finished" podID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerID="d17cff6afafd29fcff06414470bf216f09404c94134124f5ded90fe02df6b87a" exitCode=0 Dec 05 12:32:59 crc kubenswrapper[4711]: I1205 12:32:59.812238 4711 generic.go:334] "Generic (PLEG): container finished" podID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerID="b55b9b38d2838b2d27e9280f494ec8d102c5988a8c4904e67f527c4fb20805fd" exitCode=2 Dec 05 12:32:59 crc kubenswrapper[4711]: I1205 12:32:59.812258 4711 generic.go:334] "Generic (PLEG): container finished" podID="01830ceb-a989-4c58-ad76-b63f213e6a4b" 
containerID="679e00bf4c5ad46d64af8c6314438a0a404db3fa1b8428a940ce07b7590606f2" exitCode=0 Dec 05 12:32:59 crc kubenswrapper[4711]: I1205 12:32:59.812287 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerDied","Data":"d17cff6afafd29fcff06414470bf216f09404c94134124f5ded90fe02df6b87a"} Dec 05 12:32:59 crc kubenswrapper[4711]: I1205 12:32:59.812311 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerDied","Data":"b55b9b38d2838b2d27e9280f494ec8d102c5988a8c4904e67f527c4fb20805fd"} Dec 05 12:32:59 crc kubenswrapper[4711]: I1205 12:32:59.812321 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerDied","Data":"679e00bf4c5ad46d64af8c6314438a0a404db3fa1b8428a940ce07b7590606f2"} Dec 05 12:33:00 crc kubenswrapper[4711]: I1205 12:33:00.822287 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"6f039111-5426-4f30-8101-1629afd4c8dc","Type":"ContainerStarted","Data":"288dc395ca2655cfe71020de9d32f94c92dd7cbb293ed20b96277cd75ff212cc"} Dec 05 12:33:00 crc kubenswrapper[4711]: I1205 12:33:00.822635 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:01.837102 4711 generic.go:334] "Generic (PLEG): container finished" podID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerID="08c8ecfe5d4ea1215468a034aca450c8773d3fefb21f54a43c21282f84436c65" exitCode=0 Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:01.837308 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerDied","Data":"08c8ecfe5d4ea1215468a034aca450c8773d3fefb21f54a43c21282f84436c65"} Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:01.929911 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:01.957215 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.417617477 podStartE2EDuration="4.957189674s" podCreationTimestamp="2025-12-05 12:32:57 +0000 UTC" firstStartedPulling="2025-12-05 12:32:58.709484885 +0000 UTC m=+1424.293807205" lastFinishedPulling="2025-12-05 12:33:00.249057072 +0000 UTC m=+1425.833379402" observedRunningTime="2025-12-05 12:33:00.847953829 +0000 UTC m=+1426.432276149" watchObservedRunningTime="2025-12-05 12:33:01.957189674 +0000 UTC m=+1427.541512004" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.019081 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.019961 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.022544 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.025416 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.051459 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-sg-core-conf-yaml\") pod \"01830ceb-a989-4c58-ad76-b63f213e6a4b\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.051558 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsfsn\" (UniqueName: \"kubernetes.io/projected/01830ceb-a989-4c58-ad76-b63f213e6a4b-kube-api-access-qsfsn\") pod \"01830ceb-a989-4c58-ad76-b63f213e6a4b\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.051621 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-run-httpd\") pod \"01830ceb-a989-4c58-ad76-b63f213e6a4b\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.051643 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-combined-ca-bundle\") pod \"01830ceb-a989-4c58-ad76-b63f213e6a4b\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.051693 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-scripts\") pod \"01830ceb-a989-4c58-ad76-b63f213e6a4b\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.051770 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-config-data\") pod \"01830ceb-a989-4c58-ad76-b63f213e6a4b\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.051844 4711 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-log-httpd\") pod \"01830ceb-a989-4c58-ad76-b63f213e6a4b\" (UID: \"01830ceb-a989-4c58-ad76-b63f213e6a4b\") " Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.052157 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "01830ceb-a989-4c58-ad76-b63f213e6a4b" (UID: "01830ceb-a989-4c58-ad76-b63f213e6a4b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.052455 4711 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.057278 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "01830ceb-a989-4c58-ad76-b63f213e6a4b" (UID: "01830ceb-a989-4c58-ad76-b63f213e6a4b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.059900 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01830ceb-a989-4c58-ad76-b63f213e6a4b-kube-api-access-qsfsn" (OuterVolumeSpecName: "kube-api-access-qsfsn") pod "01830ceb-a989-4c58-ad76-b63f213e6a4b" (UID: "01830ceb-a989-4c58-ad76-b63f213e6a4b"). InnerVolumeSpecName "kube-api-access-qsfsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.061779 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-scripts" (OuterVolumeSpecName: "scripts") pod "01830ceb-a989-4c58-ad76-b63f213e6a4b" (UID: "01830ceb-a989-4c58-ad76-b63f213e6a4b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.093956 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "01830ceb-a989-4c58-ad76-b63f213e6a4b" (UID: "01830ceb-a989-4c58-ad76-b63f213e6a4b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.147030 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01830ceb-a989-4c58-ad76-b63f213e6a4b" (UID: "01830ceb-a989-4c58-ad76-b63f213e6a4b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.153827 4711 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01830ceb-a989-4c58-ad76-b63f213e6a4b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.153851 4711 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.154116 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsfsn\" (UniqueName: \"kubernetes.io/projected/01830ceb-a989-4c58-ad76-b63f213e6a4b-kube-api-access-qsfsn\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.154126 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.154134 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.154682 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-config-data" (OuterVolumeSpecName: "config-data") pod "01830ceb-a989-4c58-ad76-b63f213e6a4b" (UID: "01830ceb-a989-4c58-ad76-b63f213e6a4b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.255757 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01830ceb-a989-4c58-ad76-b63f213e6a4b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.849329 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01830ceb-a989-4c58-ad76-b63f213e6a4b","Type":"ContainerDied","Data":"08abb37f2655fb7799c47b159456aa19f3eab8d2ed2c5b2c1df16b2e8bcf25b8"} Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.849420 4711 scope.go:117] "RemoveContainer" containerID="d17cff6afafd29fcff06414470bf216f09404c94134124f5ded90fe02df6b87a" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.849468 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.849887 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.856508 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.874033 4711 scope.go:117] "RemoveContainer" containerID="b55b9b38d2838b2d27e9280f494ec8d102c5988a8c4904e67f527c4fb20805fd" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.880752 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.902582 4711 scope.go:117] "RemoveContainer" containerID="08c8ecfe5d4ea1215468a034aca450c8773d3fefb21f54a43c21282f84436c65" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.911778 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.924021 4711 scope.go:117] "RemoveContainer" containerID="679e00bf4c5ad46d64af8c6314438a0a404db3fa1b8428a940ce07b7590606f2" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929033 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:02 crc kubenswrapper[4711]: E1205 12:33:02.929491 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="sg-core" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929508 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="sg-core" Dec 05 12:33:02 crc kubenswrapper[4711]: E1205 12:33:02.929526 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="ceilometer-central-agent" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929536 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="ceilometer-central-agent" Dec 05 12:33:02 crc kubenswrapper[4711]: E1205 12:33:02.929569 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="ceilometer-notification-agent" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929579 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="ceilometer-notification-agent" Dec 05 12:33:02 crc kubenswrapper[4711]: E1205 12:33:02.929594 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="proxy-httpd" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929601 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="proxy-httpd" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929815 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="sg-core" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929833 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="proxy-httpd" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929844 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" 
containerName="ceilometer-notification-agent" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.929860 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" containerName="ceilometer-central-agent" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.931923 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.936856 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.937203 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.937368 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.967830 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-log-httpd\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.967964 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.968263 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.968350 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-scripts\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.968375 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-config-data\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.968421 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8xvm\" (UniqueName: \"kubernetes.io/projected/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-kube-api-access-m8xvm\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.968473 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " 
pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.968545 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-run-httpd\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:02 crc kubenswrapper[4711]: I1205 12:33:02.984804 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.062475 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69cc8c865c-d9tkn"] Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.064269 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.070057 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.070109 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-scripts\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.070132 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-config-data\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.070147 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8xvm\" (UniqueName: \"kubernetes.io/projected/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-kube-api-access-m8xvm\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.070170 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.070194 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-run-httpd\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.070246 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-log-httpd\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.070356 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.077257 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.077548 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.077583 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-run-httpd\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.077770 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-log-httpd\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.085641 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-config-data\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.086458 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-scripts\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.091701 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.092018 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69cc8c865c-d9tkn"] Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.093474 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8xvm\" (UniqueName: \"kubernetes.io/projected/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-kube-api-access-m8xvm\") pod \"ceilometer-0\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " pod="openstack/ceilometer-0" Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.173938 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-config\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" Dec 05 12:33:03 crc kubenswrapper[4711]: 
I1205 12:33:03.174059 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5p5c\" (UniqueName: \"kubernetes.io/projected/3c86ec8c-e349-4b7b-a4c8-21778f352703-kube-api-access-s5p5c\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.174120 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-nb\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.174241 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-svc\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.174265 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-swift-storage-0\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.174288 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-sb\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.262813 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.276227 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5p5c\" (UniqueName: \"kubernetes.io/projected/3c86ec8c-e349-4b7b-a4c8-21778f352703-kube-api-access-s5p5c\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.276617 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-nb\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.276717 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-svc\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.276737 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-swift-storage-0\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.276756 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-sb\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.276803 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-config\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.277720 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-swift-storage-0\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.277719 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-nb\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.277827 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-config\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.277944 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-sb\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.278481 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-svc\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.292777 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5p5c\" (UniqueName: \"kubernetes.io/projected/3c86ec8c-e349-4b7b-a4c8-21778f352703-kube-api-access-s5p5c\") pod \"dnsmasq-dns-69cc8c865c-d9tkn\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.479616 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.752720 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.874232 4711 generic.go:334] "Generic (PLEG): container finished" podID="907e0f25-2faa-4e14-a121-a5b31b285b7c" containerID="bad9ac51a1a41308edb5c2832c87c028f49e13e6c9c25ff1d355d5e8f71eb77f" exitCode=137
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.874460 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"907e0f25-2faa-4e14-a121-a5b31b285b7c","Type":"ContainerDied","Data":"bad9ac51a1a41308edb5c2832c87c028f49e13e6c9c25ff1d355d5e8f71eb77f"}
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.877044 4711 generic.go:334] "Generic (PLEG): container finished" podID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerID="310df0f2ff941e827e368e2de9a5dc404af9dcf7895906b99970cd36b72e2c6a" exitCode=137
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.878120 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerDied","Data":"310df0f2ff941e827e368e2de9a5dc404af9dcf7895906b99970cd36b72e2c6a"}
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.878198 4711 scope.go:117] "RemoveContainer" containerID="5d60566f39f2759b59c1362303dd84252e2514cdfbbb90b6425c8785219ff191"
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.882255 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerStarted","Data":"7808074b45eb1336395e7cf5217c39c8f5c60a142a48bccc87f35d79bb386f69"}
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.909137 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"740e9907-2689-4775-bf06-78517da985f7","Type":"ContainerDied","Data":"b98eded8dfe31315b1fd5c070239621819f6e4214d67a119146b37cca7ff0447"}
Dec 05 12:33:03 crc kubenswrapper[4711]: I1205 12:33:03.910897 4711 generic.go:334] "Generic (PLEG): container finished" podID="740e9907-2689-4775-bf06-78517da985f7" containerID="b98eded8dfe31315b1fd5c070239621819f6e4214d67a119146b37cca7ff0447" exitCode=137
containerID="b98eded8dfe31315b1fd5c070239621819f6e4214d67a119146b37cca7ff0447" exitCode=137 Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.061931 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.081928 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.092559 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-combined-ca-bundle\") pod \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.092772 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-config-data\") pod \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.092944 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-combined-ca-bundle\") pod \"740e9907-2689-4775-bf06-78517da985f7\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.093062 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g822z\" (UniqueName: \"kubernetes.io/projected/740e9907-2689-4775-bf06-78517da985f7-kube-api-access-g822z\") pod \"740e9907-2689-4775-bf06-78517da985f7\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.093988 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjnwp\" (UniqueName: \"kubernetes.io/projected/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-kube-api-access-fjnwp\") pod \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.094097 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-config-data\") pod \"740e9907-2689-4775-bf06-78517da985f7\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.094234 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-logs\") pod \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.094308 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/740e9907-2689-4775-bf06-78517da985f7-logs\") pod \"740e9907-2689-4775-bf06-78517da985f7\" (UID: \"740e9907-2689-4775-bf06-78517da985f7\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.094415 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-custom-prometheus-ca\") pod 
\"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\" (UID: \"2fe97f8e-7a0e-40ef-8cab-3530224b79ee\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.097768 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-logs" (OuterVolumeSpecName: "logs") pod "2fe97f8e-7a0e-40ef-8cab-3530224b79ee" (UID: "2fe97f8e-7a0e-40ef-8cab-3530224b79ee"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.098544 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/740e9907-2689-4775-bf06-78517da985f7-logs" (OuterVolumeSpecName: "logs") pod "740e9907-2689-4775-bf06-78517da985f7" (UID: "740e9907-2689-4775-bf06-78517da985f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.118125 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/740e9907-2689-4775-bf06-78517da985f7-kube-api-access-g822z" (OuterVolumeSpecName: "kube-api-access-g822z") pod "740e9907-2689-4775-bf06-78517da985f7" (UID: "740e9907-2689-4775-bf06-78517da985f7"). InnerVolumeSpecName "kube-api-access-g822z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.119492 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-kube-api-access-fjnwp" (OuterVolumeSpecName: "kube-api-access-fjnwp") pod "2fe97f8e-7a0e-40ef-8cab-3530224b79ee" (UID: "2fe97f8e-7a0e-40ef-8cab-3530224b79ee"). InnerVolumeSpecName "kube-api-access-fjnwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.144243 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.198546 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-combined-ca-bundle\") pod \"907e0f25-2faa-4e14-a121-a5b31b285b7c\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.199143 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwhbf\" (UniqueName: \"kubernetes.io/projected/907e0f25-2faa-4e14-a121-a5b31b285b7c-kube-api-access-hwhbf\") pod \"907e0f25-2faa-4e14-a121-a5b31b285b7c\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.199259 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-config-data\") pod \"907e0f25-2faa-4e14-a121-a5b31b285b7c\" (UID: \"907e0f25-2faa-4e14-a121-a5b31b285b7c\") " Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.199979 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjnwp\" (UniqueName: \"kubernetes.io/projected/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-kube-api-access-fjnwp\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.200046 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.200096 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/740e9907-2689-4775-bf06-78517da985f7-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.200142 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g822z\" (UniqueName: \"kubernetes.io/projected/740e9907-2689-4775-bf06-78517da985f7-kube-api-access-g822z\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.202443 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "740e9907-2689-4775-bf06-78517da985f7" (UID: "740e9907-2689-4775-bf06-78517da985f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.211543 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69cc8c865c-d9tkn"] Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.216717 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/907e0f25-2faa-4e14-a121-a5b31b285b7c-kube-api-access-hwhbf" (OuterVolumeSpecName: "kube-api-access-hwhbf") pod "907e0f25-2faa-4e14-a121-a5b31b285b7c" (UID: "907e0f25-2faa-4e14-a121-a5b31b285b7c"). InnerVolumeSpecName "kube-api-access-hwhbf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.271528 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "2fe97f8e-7a0e-40ef-8cab-3530224b79ee" (UID: "2fe97f8e-7a0e-40ef-8cab-3530224b79ee"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.295605 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2fe97f8e-7a0e-40ef-8cab-3530224b79ee" (UID: "2fe97f8e-7a0e-40ef-8cab-3530224b79ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.305530 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwhbf\" (UniqueName: \"kubernetes.io/projected/907e0f25-2faa-4e14-a121-a5b31b285b7c-kube-api-access-hwhbf\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.305570 4711 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.305610 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.305624 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.310447 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-config-data" (OuterVolumeSpecName: "config-data") pod "740e9907-2689-4775-bf06-78517da985f7" (UID: "740e9907-2689-4775-bf06-78517da985f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.312242 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "907e0f25-2faa-4e14-a121-a5b31b285b7c" (UID: "907e0f25-2faa-4e14-a121-a5b31b285b7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.315943 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-config-data" (OuterVolumeSpecName: "config-data") pod "907e0f25-2faa-4e14-a121-a5b31b285b7c" (UID: "907e0f25-2faa-4e14-a121-a5b31b285b7c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.336359 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-config-data" (OuterVolumeSpecName: "config-data") pod "2fe97f8e-7a0e-40ef-8cab-3530224b79ee" (UID: "2fe97f8e-7a0e-40ef-8cab-3530224b79ee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.407491 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.407527 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe97f8e-7a0e-40ef-8cab-3530224b79ee-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.407543 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/907e0f25-2faa-4e14-a121-a5b31b285b7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.407626 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/740e9907-2689-4775-bf06-78517da985f7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.696587 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01830ceb-a989-4c58-ad76-b63f213e6a4b" path="/var/lib/kubelet/pods/01830ceb-a989-4c58-ad76-b63f213e6a4b/volumes" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.925572 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"907e0f25-2faa-4e14-a121-a5b31b285b7c","Type":"ContainerDied","Data":"40fdf991462df1b42c795e7351ba9b0bf27d2271fe3534b4e19651f27117b838"} Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.925604 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.925653 4711 scope.go:117] "RemoveContainer" containerID="bad9ac51a1a41308edb5c2832c87c028f49e13e6c9c25ff1d355d5e8f71eb77f" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.927200 4711 generic.go:334] "Generic (PLEG): container finished" podID="3c86ec8c-e349-4b7b-a4c8-21778f352703" containerID="e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000" exitCode=0 Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.927283 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" event={"ID":"3c86ec8c-e349-4b7b-a4c8-21778f352703","Type":"ContainerDied","Data":"e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000"} Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.927313 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" event={"ID":"3c86ec8c-e349-4b7b-a4c8-21778f352703","Type":"ContainerStarted","Data":"ce0cad81d4452eaabf20d8be9a0f5e5d8c833acf62fba203b584cf52013acc97"} Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.941190 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"2fe97f8e-7a0e-40ef-8cab-3530224b79ee","Type":"ContainerDied","Data":"2139433d5e48ecf1a25ac6e999fc349ca3d877e06d625eb1ff26540a348ef6bb"} Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.941267 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.960363 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerStarted","Data":"c9de4a9d2086b75c8dcdcda251da4cbb668ccfdacf7b013d37cdb6a1fd4fe7be"} Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.960424 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerStarted","Data":"bed195147d18d0abce6eff4307a28fe785577fd4dc8372ce1abd54b413770f26"} Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.974525 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:33:04 crc kubenswrapper[4711]: I1205 12:33:04.975156 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"740e9907-2689-4775-bf06-78517da985f7","Type":"ContainerDied","Data":"96fa96bc44cbc86d605b7c7ebcdb3a343cac6b1d7f0daa964f999069663fbd90"} Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.015233 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.019747 4711 scope.go:117] "RemoveContainer" containerID="310df0f2ff941e827e368e2de9a5dc404af9dcf7895906b99970cd36b72e2c6a" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.071271 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.080439 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: E1205 12:33:05.080965 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.081035 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: E1205 12:33:05.081110 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.081157 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: E1205 12:33:05.081210 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.081254 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: E1205 12:33:05.081299 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="740e9907-2689-4775-bf06-78517da985f7" containerName="nova-metadata-metadata" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.081344 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="740e9907-2689-4775-bf06-78517da985f7" containerName="nova-metadata-metadata" Dec 05 12:33:05 crc kubenswrapper[4711]: E1205 12:33:05.081423 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.081472 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: E1205 12:33:05.081539 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="907e0f25-2faa-4e14-a121-a5b31b285b7c" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.081587 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="907e0f25-2faa-4e14-a121-a5b31b285b7c" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 12:33:05 crc kubenswrapper[4711]: E1205 
12:33:05.081641 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="740e9907-2689-4775-bf06-78517da985f7" containerName="nova-metadata-log" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.081686 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="740e9907-2689-4775-bf06-78517da985f7" containerName="nova-metadata-log" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.081933 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="740e9907-2689-4775-bf06-78517da985f7" containerName="nova-metadata-metadata" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.082056 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="907e0f25-2faa-4e14-a121-a5b31b285b7c" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.082137 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.082245 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.082446 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="740e9907-2689-4775-bf06-78517da985f7" containerName="nova-metadata-log" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.082758 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.082827 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.082878 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.083599 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.085576 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.089805 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.090891 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.101782 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.119328 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.119491 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.119542 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.119568 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.119603 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr6cl\" (UniqueName: \"kubernetes.io/projected/889e0ed2-a76b-42dd-901e-ff2707f8443d-kube-api-access-fr6cl\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.146972 4711 scope.go:117] "RemoveContainer" containerID="b98eded8dfe31315b1fd5c070239621819f6e4214d67a119146b37cca7ff0447" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.170943 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.193454 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.203761 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.220664 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/watcher-decision-engine-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.222221 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.222300 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.222329 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.222360 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr6cl\" (UniqueName: \"kubernetes.io/projected/889e0ed2-a76b-42dd-901e-ff2707f8443d-kube-api-access-fr6cl\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.222487 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.226125 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.231060 4711 scope.go:117] "RemoveContainer" containerID="4bd7f868a765e36f431707c4d5a312ff62d3879c7dbb47db534b0ab7e8bce13a" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.231105 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.231960 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.232894 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.245297 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fr6cl\" (UniqueName: \"kubernetes.io/projected/889e0ed2-a76b-42dd-901e-ff2707f8443d-kube-api-access-fr6cl\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.262004 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/889e0ed2-a76b-42dd-901e-ff2707f8443d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"889e0ed2-a76b-42dd-901e-ff2707f8443d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.276464 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: E1205 12:33:05.276925 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.276938 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" containerName="watcher-decision-engine" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.278254 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.281424 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.282904 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.305841 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.316342 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.317875 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.322825 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.326421 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-config-data\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.326963 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bl96\" (UniqueName: \"kubernetes.io/projected/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-kube-api-access-7bl96\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.327065 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.327109 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.327164 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.327224 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-logs\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.428679 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.428749 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-config-data\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.428780 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-logs\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.428804 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.428949 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-logs\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.429028 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-config-data\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.429061 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.429080 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bl96\" (UniqueName: \"kubernetes.io/projected/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-kube-api-access-7bl96\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.429105 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfjqn\" (UniqueName: \"kubernetes.io/projected/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-kube-api-access-xfjqn\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.429130 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.429446 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-logs\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.432133 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.432615 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-config-data\") 
pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.435500 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.450965 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bl96\" (UniqueName: \"kubernetes.io/projected/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-kube-api-access-7bl96\") pod \"nova-metadata-0\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.482128 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.530633 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfjqn\" (UniqueName: \"kubernetes.io/projected/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-kube-api-access-xfjqn\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.530675 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.530745 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-config-data\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.530777 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-logs\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.530907 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.531346 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-logs\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.536016 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-config-data\") pod 
\"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.536724 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.541234 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.549702 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfjqn\" (UniqueName: \"kubernetes.io/projected/560f4e5b-3ebe-4357-ad7d-d3fb4912b63f-kube-api-access-xfjqn\") pod \"watcher-decision-engine-0\" (UID: \"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.608850 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.648958 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:33:05 crc kubenswrapper[4711]: I1205 12:33:05.999221 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" event={"ID":"3c86ec8c-e349-4b7b-a4c8-21778f352703","Type":"ContainerStarted","Data":"16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632"} Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.008573 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.023852 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerStarted","Data":"f2d80d88bd00bd5e9dfa4a70f18f47ae9a59c8b4f347d2d59c4b3d68cb53257e"} Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.042254 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.104930 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" podStartSLOduration=3.104910533 podStartE2EDuration="3.104910533s" podCreationTimestamp="2025-12-05 12:33:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:06.041156676 +0000 UTC m=+1431.625478996" watchObservedRunningTime="2025-12-05 12:33:06.104910533 +0000 UTC m=+1431.689232863" Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.258100 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:06 crc kubenswrapper[4711]: W1205 12:33:06.260817 4711 manager.go:1169] Failed to process watch event {EventType:0 
Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.389091 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.696568 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fe97f8e-7a0e-40ef-8cab-3530224b79ee" path="/var/lib/kubelet/pods/2fe97f8e-7a0e-40ef-8cab-3530224b79ee/volumes"
Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.697899 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="740e9907-2689-4775-bf06-78517da985f7" path="/var/lib/kubelet/pods/740e9907-2689-4775-bf06-78517da985f7/volumes"
Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.698675 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="907e0f25-2faa-4e14-a121-a5b31b285b7c" path="/var/lib/kubelet/pods/907e0f25-2faa-4e14-a121-a5b31b285b7c/volumes"
Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.799332 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.799692 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-api" containerID="cri-o://fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527" gracePeriod=30
Dec 05 12:33:06 crc kubenswrapper[4711]: I1205 12:33:06.799877 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-log" containerID="cri-o://a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713" gracePeriod=30
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.105492 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f","Type":"ContainerStarted","Data":"08db42df4315757cb431e43be4c96efafdde6539740e6b26c290ce2a09e784f4"}
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.105743 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"560f4e5b-3ebe-4357-ad7d-d3fb4912b63f","Type":"ContainerStarted","Data":"97e8a26527d11e077e5e931eff13fcdf9c03eaffa42690587a6251d405675700"}
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.115356 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"58bb3765-e8c7-4bd7-9c64-0b5257d5a140","Type":"ContainerStarted","Data":"f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661"}
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.115453 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"58bb3765-e8c7-4bd7-9c64-0b5257d5a140","Type":"ContainerStarted","Data":"8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41"}
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.115466 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"58bb3765-e8c7-4bd7-9c64-0b5257d5a140","Type":"ContainerStarted","Data":"4e499a9c9dfa0019ef585304b7e776974f3506d45b32afa4f511453d70aabc8e"}
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.119250 4711 generic.go:334] "Generic (PLEG): container finished" podID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerID="a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713" exitCode=143
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.119602 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b7cbaee1-6b39-409e-b338-660c4a6ba023","Type":"ContainerDied","Data":"a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713"}
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.135524 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"889e0ed2-a76b-42dd-901e-ff2707f8443d","Type":"ContainerStarted","Data":"d8cc6a048f9fcfd7723bdd4c8de6d0c0c24d1a441eb71d774ca92a6216c50432"}
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.135565 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"889e0ed2-a76b-42dd-901e-ff2707f8443d","Type":"ContainerStarted","Data":"b8325ecb6f4d37a92ae629d23c498f25cda46af83119507a153e33363dfa236a"}
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.166644 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=2.16662206 podStartE2EDuration="2.16662206s" podCreationTimestamp="2025-12-05 12:33:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:07.130353868 +0000 UTC m=+1432.714676198" watchObservedRunningTime="2025-12-05 12:33:07.16662206 +0000 UTC m=+1432.750944390"
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.200912 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.200891982 podStartE2EDuration="2.200891982s" podCreationTimestamp="2025-12-05 12:33:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:07.152434021 +0000 UTC m=+1432.736756361" watchObservedRunningTime="2025-12-05 12:33:07.200891982 +0000 UTC m=+1432.785214312"
Dec 05 12:33:07 crc kubenswrapper[4711]: I1205 12:33:07.231529 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.231507455 podStartE2EDuration="3.231507455s" podCreationTimestamp="2025-12-05 12:33:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:07.174269948 +0000 UTC m=+1432.758592278" watchObservedRunningTime="2025-12-05 12:33:07.231507455 +0000 UTC m=+1432.815829785"
Dec 05 12:33:08 crc kubenswrapper[4711]: I1205 12:33:08.146264 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerStarted","Data":"ed837451b5237f11e551ff07af0ee2467d43c5f9dfd6c4c18fa905101e8a18bb"}
Dec 05 12:33:08 crc kubenswrapper[4711]: I1205 12:33:08.146726 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="ceilometer-central-agent" containerID="cri-o://bed195147d18d0abce6eff4307a28fe785577fd4dc8372ce1abd54b413770f26" gracePeriod=30
Dec 05 12:33:08 crc kubenswrapper[4711]: I1205 12:33:08.146823 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="proxy-httpd" containerID="cri-o://ed837451b5237f11e551ff07af0ee2467d43c5f9dfd6c4c18fa905101e8a18bb" gracePeriod=30
Dec 05 12:33:08 crc kubenswrapper[4711]: I1205 12:33:08.146860 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="sg-core" containerID="cri-o://f2d80d88bd00bd5e9dfa4a70f18f47ae9a59c8b4f347d2d59c4b3d68cb53257e" gracePeriod=30
Dec 05 12:33:08 crc kubenswrapper[4711]: I1205 12:33:08.146894 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="ceilometer-notification-agent" containerID="cri-o://c9de4a9d2086b75c8dcdcda251da4cbb668ccfdacf7b013d37cdb6a1fd4fe7be" gracePeriod=30
Dec 05 12:33:08 crc kubenswrapper[4711]: I1205 12:33:08.180851 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.942705405 podStartE2EDuration="6.180829888s" podCreationTimestamp="2025-12-05 12:33:02 +0000 UTC" firstStartedPulling="2025-12-05 12:33:03.771576099 +0000 UTC m=+1429.355898429" lastFinishedPulling="2025-12-05 12:33:07.009700582 +0000 UTC m=+1432.594022912" observedRunningTime="2025-12-05 12:33:08.169792307 +0000 UTC m=+1433.754114627" watchObservedRunningTime="2025-12-05 12:33:08.180829888 +0000 UTC m=+1433.765152228"
Dec 05 12:33:08 crc kubenswrapper[4711]: I1205 12:33:08.212655 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 05 12:33:08 crc kubenswrapper[4711]: I1205 12:33:08.979821 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.049062 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-config-data\") pod \"b7cbaee1-6b39-409e-b338-660c4a6ba023\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.049432 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-combined-ca-bundle\") pod \"b7cbaee1-6b39-409e-b338-660c4a6ba023\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.049537 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qf58\" (UniqueName: \"kubernetes.io/projected/b7cbaee1-6b39-409e-b338-660c4a6ba023-kube-api-access-8qf58\") pod \"b7cbaee1-6b39-409e-b338-660c4a6ba023\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.049919 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7cbaee1-6b39-409e-b338-660c4a6ba023-logs\") pod \"b7cbaee1-6b39-409e-b338-660c4a6ba023\" (UID: \"b7cbaee1-6b39-409e-b338-660c4a6ba023\") " Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.050767 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7cbaee1-6b39-409e-b338-660c4a6ba023-logs" (OuterVolumeSpecName: "logs") pod "b7cbaee1-6b39-409e-b338-660c4a6ba023" (UID: "b7cbaee1-6b39-409e-b338-660c4a6ba023"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.052575 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7cbaee1-6b39-409e-b338-660c4a6ba023-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.060579 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7cbaee1-6b39-409e-b338-660c4a6ba023-kube-api-access-8qf58" (OuterVolumeSpecName: "kube-api-access-8qf58") pod "b7cbaee1-6b39-409e-b338-660c4a6ba023" (UID: "b7cbaee1-6b39-409e-b338-660c4a6ba023"). InnerVolumeSpecName "kube-api-access-8qf58". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.090481 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7cbaee1-6b39-409e-b338-660c4a6ba023" (UID: "b7cbaee1-6b39-409e-b338-660c4a6ba023"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.102566 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-config-data" (OuterVolumeSpecName: "config-data") pod "b7cbaee1-6b39-409e-b338-660c4a6ba023" (UID: "b7cbaee1-6b39-409e-b338-660c4a6ba023"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.155697 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.155733 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7cbaee1-6b39-409e-b338-660c4a6ba023-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.155743 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qf58\" (UniqueName: \"kubernetes.io/projected/b7cbaee1-6b39-409e-b338-660c4a6ba023-kube-api-access-8qf58\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.178432 4711 generic.go:334] "Generic (PLEG): container finished" podID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerID="fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527" exitCode=0 Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.178547 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b7cbaee1-6b39-409e-b338-660c4a6ba023","Type":"ContainerDied","Data":"fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527"} Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.178551 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.178575 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b7cbaee1-6b39-409e-b338-660c4a6ba023","Type":"ContainerDied","Data":"72d6b416e607b8ccb44b47dc2e37602da8a6b36611697d178b33d341330adcf1"} Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.178595 4711 scope.go:117] "RemoveContainer" containerID="fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.185234 4711 generic.go:334] "Generic (PLEG): container finished" podID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerID="ed837451b5237f11e551ff07af0ee2467d43c5f9dfd6c4c18fa905101e8a18bb" exitCode=0 Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.185258 4711 generic.go:334] "Generic (PLEG): container finished" podID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerID="f2d80d88bd00bd5e9dfa4a70f18f47ae9a59c8b4f347d2d59c4b3d68cb53257e" exitCode=2 Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.185264 4711 generic.go:334] "Generic (PLEG): container finished" podID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerID="c9de4a9d2086b75c8dcdcda251da4cbb668ccfdacf7b013d37cdb6a1fd4fe7be" exitCode=0 Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.185281 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerDied","Data":"ed837451b5237f11e551ff07af0ee2467d43c5f9dfd6c4c18fa905101e8a18bb"} Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.185311 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerDied","Data":"f2d80d88bd00bd5e9dfa4a70f18f47ae9a59c8b4f347d2d59c4b3d68cb53257e"} Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.185323 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerDied","Data":"c9de4a9d2086b75c8dcdcda251da4cbb668ccfdacf7b013d37cdb6a1fd4fe7be"} Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.232486 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.244353 4711 scope.go:117] "RemoveContainer" containerID="a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.253752 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.266474 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:09 crc kubenswrapper[4711]: E1205 12:33:09.272503 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-log" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.272541 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-log" Dec 05 12:33:09 crc kubenswrapper[4711]: E1205 12:33:09.272558 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-api" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.272564 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-api" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.272774 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-api" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.272801 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" containerName="nova-api-log" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.273898 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.276225 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.279333 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.280764 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.298977 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.313036 4711 scope.go:117] "RemoveContainer" containerID="fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527" Dec 05 12:33:09 crc kubenswrapper[4711]: E1205 12:33:09.313546 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527\": container with ID starting with fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527 not found: ID does not exist" containerID="fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.313581 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527"} err="failed to get container status \"fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527\": rpc error: code = NotFound desc = could not find container \"fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527\": container with ID starting with fca87a43a1cf585f6618da2353ce1d8364af92c26fd4f7226268ca47bdfd0527 not found: ID does not exist" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.313604 4711 scope.go:117] "RemoveContainer" containerID="a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713" Dec 05 12:33:09 crc kubenswrapper[4711]: E1205 12:33:09.313882 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713\": container with ID starting with a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713 not found: ID does not exist" containerID="a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.313904 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713"} err="failed to get container status \"a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713\": rpc error: code = NotFound desc = could not find container \"a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713\": container with ID starting with a141d89be48f92b9f2c2768169447967ff13d7bddbd5d8c52a2680a25de74713 not found: ID does not exist" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.359931 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 
12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.360306 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-config-data\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.360343 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-public-tls-certs\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.360590 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwqkg\" (UniqueName: \"kubernetes.io/projected/65521134-3d77-4baa-af20-7efc4ab2b48d-kube-api-access-wwqkg\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.360636 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.360811 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65521134-3d77-4baa-af20-7efc4ab2b48d-logs\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.462919 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-config-data\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.463046 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-public-tls-certs\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.463102 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwqkg\" (UniqueName: \"kubernetes.io/projected/65521134-3d77-4baa-af20-7efc4ab2b48d-kube-api-access-wwqkg\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.463617 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.464209 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65521134-3d77-4baa-af20-7efc4ab2b48d-logs\") pod \"nova-api-0\" (UID: 
\"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.464287 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.466087 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65521134-3d77-4baa-af20-7efc4ab2b48d-logs\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.470242 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-config-data\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.470680 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-public-tls-certs\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.471944 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.472323 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.482603 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwqkg\" (UniqueName: \"kubernetes.io/projected/65521134-3d77-4baa-af20-7efc4ab2b48d-kube-api-access-wwqkg\") pod \"nova-api-0\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " pod="openstack/nova-api-0" Dec 05 12:33:09 crc kubenswrapper[4711]: I1205 12:33:09.623312 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:33:10 crc kubenswrapper[4711]: W1205 12:33:10.174185 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65521134_3d77_4baa_af20_7efc4ab2b48d.slice/crio-b4abc271962ceb3a073e8f4c12d517bfad3d85e15638c8c62e88ffe680754263 WatchSource:0}: Error finding container b4abc271962ceb3a073e8f4c12d517bfad3d85e15638c8c62e88ffe680754263: Status 404 returned error can't find the container with id b4abc271962ceb3a073e8f4c12d517bfad3d85e15638c8c62e88ffe680754263 Dec 05 12:33:10 crc kubenswrapper[4711]: I1205 12:33:10.176781 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:10 crc kubenswrapper[4711]: I1205 12:33:10.206687 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65521134-3d77-4baa-af20-7efc4ab2b48d","Type":"ContainerStarted","Data":"b4abc271962ceb3a073e8f4c12d517bfad3d85e15638c8c62e88ffe680754263"} Dec 05 12:33:10 crc kubenswrapper[4711]: I1205 12:33:10.482566 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:10 crc kubenswrapper[4711]: I1205 12:33:10.619618 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:33:10 crc kubenswrapper[4711]: I1205 12:33:10.620639 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:33:10 crc kubenswrapper[4711]: I1205 12:33:10.698013 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7cbaee1-6b39-409e-b338-660c4a6ba023" path="/var/lib/kubelet/pods/b7cbaee1-6b39-409e-b338-660c4a6ba023/volumes" Dec 05 12:33:11 crc kubenswrapper[4711]: I1205 12:33:11.217344 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65521134-3d77-4baa-af20-7efc4ab2b48d","Type":"ContainerStarted","Data":"35f074132b3996ca1b1f5da794898b77835e65cece467afa3cb009f2666db846"} Dec 05 12:33:11 crc kubenswrapper[4711]: I1205 12:33:11.217422 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65521134-3d77-4baa-af20-7efc4ab2b48d","Type":"ContainerStarted","Data":"fdf30377d3fa9a9a19fcac48234fc4b081641fc72387054e066c66344408a330"} Dec 05 12:33:11 crc kubenswrapper[4711]: I1205 12:33:11.239734 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.239711874 podStartE2EDuration="2.239711874s" podCreationTimestamp="2025-12-05 12:33:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:11.234195048 +0000 UTC m=+1436.818517388" watchObservedRunningTime="2025-12-05 12:33:11.239711874 +0000 UTC m=+1436.824034204" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.247228 4711 generic.go:334] "Generic (PLEG): container finished" podID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerID="bed195147d18d0abce6eff4307a28fe785577fd4dc8372ce1abd54b413770f26" exitCode=0 Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.247801 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerDied","Data":"bed195147d18d0abce6eff4307a28fe785577fd4dc8372ce1abd54b413770f26"} Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.368212 4711 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.460327 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8xvm\" (UniqueName: \"kubernetes.io/projected/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-kube-api-access-m8xvm\") pod \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.460375 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-sg-core-conf-yaml\") pod \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.460528 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-log-httpd\") pod \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.460682 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-config-data\") pod \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.460738 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-ceilometer-tls-certs\") pod \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.460769 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-run-httpd\") pod \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.460821 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-scripts\") pod \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.460842 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-combined-ca-bundle\") pod \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\" (UID: \"c80fc5b1-0e14-43b0-bf5b-329b6254d79d\") " Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.461185 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c80fc5b1-0e14-43b0-bf5b-329b6254d79d" (UID: "c80fc5b1-0e14-43b0-bf5b-329b6254d79d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.461634 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c80fc5b1-0e14-43b0-bf5b-329b6254d79d" (UID: "c80fc5b1-0e14-43b0-bf5b-329b6254d79d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.461848 4711 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.467669 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-scripts" (OuterVolumeSpecName: "scripts") pod "c80fc5b1-0e14-43b0-bf5b-329b6254d79d" (UID: "c80fc5b1-0e14-43b0-bf5b-329b6254d79d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.467721 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-kube-api-access-m8xvm" (OuterVolumeSpecName: "kube-api-access-m8xvm") pod "c80fc5b1-0e14-43b0-bf5b-329b6254d79d" (UID: "c80fc5b1-0e14-43b0-bf5b-329b6254d79d"). InnerVolumeSpecName "kube-api-access-m8xvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.481311 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.494099 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c80fc5b1-0e14-43b0-bf5b-329b6254d79d" (UID: "c80fc5b1-0e14-43b0-bf5b-329b6254d79d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.527152 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "c80fc5b1-0e14-43b0-bf5b-329b6254d79d" (UID: "c80fc5b1-0e14-43b0-bf5b-329b6254d79d"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.553269 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c7cddbf6c-zpqgg"] Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.553519 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" podUID="3a092676-35a7-435a-a7d2-06dd27c07809" containerName="dnsmasq-dns" containerID="cri-o://ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae" gracePeriod=10 Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.563447 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8xvm\" (UniqueName: \"kubernetes.io/projected/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-kube-api-access-m8xvm\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.563764 4711 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.563779 4711 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.563794 4711 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.563805 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.604826 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c80fc5b1-0e14-43b0-bf5b-329b6254d79d" (UID: "c80fc5b1-0e14-43b0-bf5b-329b6254d79d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.634057 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-config-data" (OuterVolumeSpecName: "config-data") pod "c80fc5b1-0e14-43b0-bf5b-329b6254d79d" (UID: "c80fc5b1-0e14-43b0-bf5b-329b6254d79d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.665400 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.665439 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80fc5b1-0e14-43b0-bf5b-329b6254d79d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:13 crc kubenswrapper[4711]: I1205 12:33:13.959661 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.072189 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwfmf\" (UniqueName: \"kubernetes.io/projected/3a092676-35a7-435a-a7d2-06dd27c07809-kube-api-access-wwfmf\") pod \"3a092676-35a7-435a-a7d2-06dd27c07809\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.072330 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-nb\") pod \"3a092676-35a7-435a-a7d2-06dd27c07809\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.072367 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-sb\") pod \"3a092676-35a7-435a-a7d2-06dd27c07809\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.072479 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-config\") pod \"3a092676-35a7-435a-a7d2-06dd27c07809\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.072507 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-swift-storage-0\") pod \"3a092676-35a7-435a-a7d2-06dd27c07809\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.072624 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-svc\") pod \"3a092676-35a7-435a-a7d2-06dd27c07809\" (UID: \"3a092676-35a7-435a-a7d2-06dd27c07809\") " Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.077899 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a092676-35a7-435a-a7d2-06dd27c07809-kube-api-access-wwfmf" (OuterVolumeSpecName: "kube-api-access-wwfmf") pod "3a092676-35a7-435a-a7d2-06dd27c07809" (UID: "3a092676-35a7-435a-a7d2-06dd27c07809"). InnerVolumeSpecName "kube-api-access-wwfmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.139084 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-config" (OuterVolumeSpecName: "config") pod "3a092676-35a7-435a-a7d2-06dd27c07809" (UID: "3a092676-35a7-435a-a7d2-06dd27c07809"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.139637 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3a092676-35a7-435a-a7d2-06dd27c07809" (UID: "3a092676-35a7-435a-a7d2-06dd27c07809"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.143176 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3a092676-35a7-435a-a7d2-06dd27c07809" (UID: "3a092676-35a7-435a-a7d2-06dd27c07809"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.146283 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3a092676-35a7-435a-a7d2-06dd27c07809" (UID: "3a092676-35a7-435a-a7d2-06dd27c07809"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.166698 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3a092676-35a7-435a-a7d2-06dd27c07809" (UID: "3a092676-35a7-435a-a7d2-06dd27c07809"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.175595 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.175647 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.175659 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.175668 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwfmf\" (UniqueName: \"kubernetes.io/projected/3a092676-35a7-435a-a7d2-06dd27c07809-kube-api-access-wwfmf\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.175685 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.175696 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a092676-35a7-435a-a7d2-06dd27c07809-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.264179 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80fc5b1-0e14-43b0-bf5b-329b6254d79d","Type":"ContainerDied","Data":"7808074b45eb1336395e7cf5217c39c8f5c60a142a48bccc87f35d79bb386f69"} Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.264240 4711 scope.go:117] "RemoveContainer" containerID="ed837451b5237f11e551ff07af0ee2467d43c5f9dfd6c4c18fa905101e8a18bb" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.264542 4711 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.271897 4711 generic.go:334] "Generic (PLEG): container finished" podID="3a092676-35a7-435a-a7d2-06dd27c07809" containerID="ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae" exitCode=0 Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.271937 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" event={"ID":"3a092676-35a7-435a-a7d2-06dd27c07809","Type":"ContainerDied","Data":"ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae"} Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.271960 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" event={"ID":"3a092676-35a7-435a-a7d2-06dd27c07809","Type":"ContainerDied","Data":"88ca84b0f908e1c0b31605002550a2df44fda28fcb41e7d2145ee93c65795b02"} Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.272009 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c7cddbf6c-zpqgg" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.304908 4711 scope.go:117] "RemoveContainer" containerID="f2d80d88bd00bd5e9dfa4a70f18f47ae9a59c8b4f347d2d59c4b3d68cb53257e" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.311478 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.324378 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.334995 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c7cddbf6c-zpqgg"] Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.344784 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:14 crc kubenswrapper[4711]: E1205 12:33:14.345274 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a092676-35a7-435a-a7d2-06dd27c07809" containerName="init" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345292 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a092676-35a7-435a-a7d2-06dd27c07809" containerName="init" Dec 05 12:33:14 crc kubenswrapper[4711]: E1205 12:33:14.345311 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="sg-core" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345318 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="sg-core" Dec 05 12:33:14 crc kubenswrapper[4711]: E1205 12:33:14.345329 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a092676-35a7-435a-a7d2-06dd27c07809" containerName="dnsmasq-dns" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345335 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a092676-35a7-435a-a7d2-06dd27c07809" containerName="dnsmasq-dns" Dec 05 12:33:14 crc kubenswrapper[4711]: E1205 12:33:14.345348 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="ceilometer-central-agent" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345355 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="ceilometer-central-agent" Dec 05 12:33:14 crc kubenswrapper[4711]: E1205 12:33:14.345365 4711 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="proxy-httpd" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345370 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="proxy-httpd" Dec 05 12:33:14 crc kubenswrapper[4711]: E1205 12:33:14.345395 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="ceilometer-notification-agent" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345401 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="ceilometer-notification-agent" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345602 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="ceilometer-notification-agent" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345614 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="proxy-httpd" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345624 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="ceilometer-central-agent" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345639 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a092676-35a7-435a-a7d2-06dd27c07809" containerName="dnsmasq-dns" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.345655 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" containerName="sg-core" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.347605 4711 scope.go:117] "RemoveContainer" containerID="c9de4a9d2086b75c8dcdcda251da4cbb668ccfdacf7b013d37cdb6a1fd4fe7be" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.360490 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c7cddbf6c-zpqgg"] Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.360630 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.361719 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.363003 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.363169 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.363170 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.368626 4711 scope.go:117] "RemoveContainer" containerID="bed195147d18d0abce6eff4307a28fe785577fd4dc8372ce1abd54b413770f26" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.394151 4711 scope.go:117] "RemoveContainer" containerID="ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.416327 4711 scope.go:117] "RemoveContainer" containerID="dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.451466 4711 scope.go:117] "RemoveContainer" containerID="ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae" Dec 05 12:33:14 crc kubenswrapper[4711]: E1205 12:33:14.451930 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae\": container with ID starting with ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae not found: ID does not exist" containerID="ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.451979 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae"} err="failed to get container status \"ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae\": rpc error: code = NotFound desc = could not find container \"ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae\": container with ID starting with ae6bd1f7296f44af8cf6730ebb812a062799627afb9f9fc3fc30c46fc8f08bae not found: ID does not exist" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.452008 4711 scope.go:117] "RemoveContainer" containerID="dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48" Dec 05 12:33:14 crc kubenswrapper[4711]: E1205 12:33:14.452244 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48\": container with ID starting with dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48 not found: ID does not exist" containerID="dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.452291 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48"} err="failed to get container status \"dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48\": rpc error: code = NotFound desc = could not find container 
\"dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48\": container with ID starting with dabb83609fbb0fb09b93ddc20f4ebd698467281d09424729ff64b4302d05cb48 not found: ID does not exist" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.481721 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.481777 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.482055 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa9b64a3-41ee-4892-92a8-4d404c1545fc-log-httpd\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.482179 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp82d\" (UniqueName: \"kubernetes.io/projected/fa9b64a3-41ee-4892-92a8-4d404c1545fc-kube-api-access-rp82d\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.482203 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-config-data\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.482245 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-scripts\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.482314 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa9b64a3-41ee-4892-92a8-4d404c1545fc-run-httpd\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.482435 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.584080 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " 
pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.584137 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.584228 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa9b64a3-41ee-4892-92a8-4d404c1545fc-log-httpd\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.584275 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp82d\" (UniqueName: \"kubernetes.io/projected/fa9b64a3-41ee-4892-92a8-4d404c1545fc-kube-api-access-rp82d\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.584292 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-config-data\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.584313 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-scripts\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.584334 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa9b64a3-41ee-4892-92a8-4d404c1545fc-run-httpd\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.584365 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.587008 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa9b64a3-41ee-4892-92a8-4d404c1545fc-log-httpd\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.588932 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa9b64a3-41ee-4892-92a8-4d404c1545fc-run-httpd\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.589730 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc 
kubenswrapper[4711]: I1205 12:33:14.589796 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.590294 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-scripts\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.590990 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-config-data\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.594174 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9b64a3-41ee-4892-92a8-4d404c1545fc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.604664 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp82d\" (UniqueName: \"kubernetes.io/projected/fa9b64a3-41ee-4892-92a8-4d404c1545fc-kube-api-access-rp82d\") pod \"ceilometer-0\" (UID: \"fa9b64a3-41ee-4892-92a8-4d404c1545fc\") " pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.679511 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.707623 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a092676-35a7-435a-a7d2-06dd27c07809" path="/var/lib/kubelet/pods/3a092676-35a7-435a-a7d2-06dd27c07809/volumes" Dec 05 12:33:14 crc kubenswrapper[4711]: I1205 12:33:14.709362 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c80fc5b1-0e14-43b0-bf5b-329b6254d79d" path="/var/lib/kubelet/pods/c80fc5b1-0e14-43b0-bf5b-329b6254d79d/volumes" Dec 05 12:33:15 crc kubenswrapper[4711]: W1205 12:33:15.148791 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa9b64a3_41ee_4892_92a8_4d404c1545fc.slice/crio-596a64f9e6f7d9e02c0d9c7e78d6c0395e1ca12b7bb290687e6b7f8333b63f9b WatchSource:0}: Error finding container 596a64f9e6f7d9e02c0d9c7e78d6c0395e1ca12b7bb290687e6b7f8333b63f9b: Status 404 returned error can't find the container with id 596a64f9e6f7d9e02c0d9c7e78d6c0395e1ca12b7bb290687e6b7f8333b63f9b Dec 05 12:33:15 crc kubenswrapper[4711]: I1205 12:33:15.150552 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:33:15 crc kubenswrapper[4711]: I1205 12:33:15.284116 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa9b64a3-41ee-4892-92a8-4d404c1545fc","Type":"ContainerStarted","Data":"596a64f9e6f7d9e02c0d9c7e78d6c0395e1ca12b7bb290687e6b7f8333b63f9b"} Dec 05 12:33:15 crc kubenswrapper[4711]: I1205 12:33:15.482440 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:15 crc kubenswrapper[4711]: I1205 12:33:15.507112 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:33:15 crc kubenswrapper[4711]: I1205 12:33:15.610227 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 12:33:15 crc kubenswrapper[4711]: I1205 12:33:15.610624 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 12:33:15 crc kubenswrapper[4711]: I1205 12:33:15.650021 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:33:15 crc kubenswrapper[4711]: I1205 12:33:15.679331 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.296113 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa9b64a3-41ee-4892-92a8-4d404c1545fc","Type":"ContainerStarted","Data":"ea74fc849949048b2e8645b7282be4ea5023546f52eb0f9cdbf7a06aa9f9c2f6"} Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.296149 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa9b64a3-41ee-4892-92a8-4d404c1545fc","Type":"ContainerStarted","Data":"f3416b7384186e0e5bb6c1412a282946cbe9ad6c3e4a4b86c287c7bb8f2ff88d"} Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.296675 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.311520 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 
05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.346734 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.573691 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-qwvs4"] Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.575091 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.576782 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.576984 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.591123 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-qwvs4"] Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.629623 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.629907 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.739137 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.739480 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-config-data\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.739898 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg4p2\" (UniqueName: \"kubernetes.io/projected/4927b347-0a86-4ab8-8a48-345cd36637b4-kube-api-access-kg4p2\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.739973 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-scripts\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.841955 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-kg4p2\" (UniqueName: \"kubernetes.io/projected/4927b347-0a86-4ab8-8a48-345cd36637b4-kube-api-access-kg4p2\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.842066 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-scripts\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.842934 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.842960 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-config-data\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.847595 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-scripts\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.847739 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-config-data\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.858045 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.859624 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg4p2\" (UniqueName: \"kubernetes.io/projected/4927b347-0a86-4ab8-8a48-345cd36637b4-kube-api-access-kg4p2\") pod \"nova-cell1-cell-mapping-qwvs4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:16 crc kubenswrapper[4711]: I1205 12:33:16.891850 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:17 crc kubenswrapper[4711]: I1205 12:33:17.306611 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa9b64a3-41ee-4892-92a8-4d404c1545fc","Type":"ContainerStarted","Data":"6800fc0564e5cf05c35d403b509cc3300c8aee71fc075fe30a2cabefe7cbbe80"} Dec 05 12:33:17 crc kubenswrapper[4711]: I1205 12:33:17.432223 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-qwvs4"] Dec 05 12:33:18 crc kubenswrapper[4711]: I1205 12:33:18.318008 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa9b64a3-41ee-4892-92a8-4d404c1545fc","Type":"ContainerStarted","Data":"734fa1ee5348e3f0c6dfff63139ae1a43e595fc2beee5dc53bcf86c96146accd"} Dec 05 12:33:18 crc kubenswrapper[4711]: I1205 12:33:18.321238 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qwvs4" event={"ID":"4927b347-0a86-4ab8-8a48-345cd36637b4","Type":"ContainerStarted","Data":"dbe85db4d6cff17470dc1fe98399ec484e8773d53f5545c66cae3285282655a0"} Dec 05 12:33:18 crc kubenswrapper[4711]: I1205 12:33:18.321274 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qwvs4" event={"ID":"4927b347-0a86-4ab8-8a48-345cd36637b4","Type":"ContainerStarted","Data":"a741711280de68adaa2da466f52ebba8a41514b47b241d84f5a99df1dd6aa48c"} Dec 05 12:33:18 crc kubenswrapper[4711]: I1205 12:33:18.343296 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.545491347 podStartE2EDuration="4.343274753s" podCreationTimestamp="2025-12-05 12:33:14 +0000 UTC" firstStartedPulling="2025-12-05 12:33:15.151286845 +0000 UTC m=+1440.735609175" lastFinishedPulling="2025-12-05 12:33:17.949070251 +0000 UTC m=+1443.533392581" observedRunningTime="2025-12-05 12:33:18.341724335 +0000 UTC m=+1443.926046665" watchObservedRunningTime="2025-12-05 12:33:18.343274753 +0000 UTC m=+1443.927597083" Dec 05 12:33:18 crc kubenswrapper[4711]: I1205 12:33:18.373721 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-qwvs4" podStartSLOduration=2.373691042 podStartE2EDuration="2.373691042s" podCreationTimestamp="2025-12-05 12:33:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:18.360161699 +0000 UTC m=+1443.944484029" watchObservedRunningTime="2025-12-05 12:33:18.373691042 +0000 UTC m=+1443.958013372" Dec 05 12:33:19 crc kubenswrapper[4711]: I1205 12:33:19.332412 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:33:19 crc kubenswrapper[4711]: I1205 12:33:19.623972 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:33:19 crc kubenswrapper[4711]: I1205 12:33:19.624034 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:33:20 crc kubenswrapper[4711]: I1205 12:33:20.636524 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.218:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:33:20 crc kubenswrapper[4711]: I1205 
12:33:20.636709 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.218:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:33:23 crc kubenswrapper[4711]: I1205 12:33:23.373614 4711 generic.go:334] "Generic (PLEG): container finished" podID="4927b347-0a86-4ab8-8a48-345cd36637b4" containerID="dbe85db4d6cff17470dc1fe98399ec484e8773d53f5545c66cae3285282655a0" exitCode=0 Dec 05 12:33:23 crc kubenswrapper[4711]: I1205 12:33:23.373690 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qwvs4" event={"ID":"4927b347-0a86-4ab8-8a48-345cd36637b4","Type":"ContainerDied","Data":"dbe85db4d6cff17470dc1fe98399ec484e8773d53f5545c66cae3285282655a0"} Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.778556 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.898240 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-scripts\") pod \"4927b347-0a86-4ab8-8a48-345cd36637b4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.898455 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-config-data\") pod \"4927b347-0a86-4ab8-8a48-345cd36637b4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.898480 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg4p2\" (UniqueName: \"kubernetes.io/projected/4927b347-0a86-4ab8-8a48-345cd36637b4-kube-api-access-kg4p2\") pod \"4927b347-0a86-4ab8-8a48-345cd36637b4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.898648 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-combined-ca-bundle\") pod \"4927b347-0a86-4ab8-8a48-345cd36637b4\" (UID: \"4927b347-0a86-4ab8-8a48-345cd36637b4\") " Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.904660 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-scripts" (OuterVolumeSpecName: "scripts") pod "4927b347-0a86-4ab8-8a48-345cd36637b4" (UID: "4927b347-0a86-4ab8-8a48-345cd36637b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.904718 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4927b347-0a86-4ab8-8a48-345cd36637b4-kube-api-access-kg4p2" (OuterVolumeSpecName: "kube-api-access-kg4p2") pod "4927b347-0a86-4ab8-8a48-345cd36637b4" (UID: "4927b347-0a86-4ab8-8a48-345cd36637b4"). InnerVolumeSpecName "kube-api-access-kg4p2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.931941 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4927b347-0a86-4ab8-8a48-345cd36637b4" (UID: "4927b347-0a86-4ab8-8a48-345cd36637b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:24 crc kubenswrapper[4711]: I1205 12:33:24.941587 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-config-data" (OuterVolumeSpecName: "config-data") pod "4927b347-0a86-4ab8-8a48-345cd36637b4" (UID: "4927b347-0a86-4ab8-8a48-345cd36637b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.000971 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.001008 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.001017 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4927b347-0a86-4ab8-8a48-345cd36637b4-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.001025 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg4p2\" (UniqueName: \"kubernetes.io/projected/4927b347-0a86-4ab8-8a48-345cd36637b4-kube-api-access-kg4p2\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.426786 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qwvs4" event={"ID":"4927b347-0a86-4ab8-8a48-345cd36637b4","Type":"ContainerDied","Data":"a741711280de68adaa2da466f52ebba8a41514b47b241d84f5a99df1dd6aa48c"} Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.426910 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a741711280de68adaa2da466f52ebba8a41514b47b241d84f5a99df1dd6aa48c" Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.426950 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qwvs4" Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.585989 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.586225 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c77ea68b-9a3e-4579-84d5-f13aab92e17c" containerName="nova-scheduler-scheduler" containerID="cri-o://b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73" gracePeriod=30 Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.597524 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.597859 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-log" containerID="cri-o://fdf30377d3fa9a9a19fcac48234fc4b081641fc72387054e066c66344408a330" gracePeriod=30 Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.598015 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-api" containerID="cri-o://35f074132b3996ca1b1f5da794898b77835e65cece467afa3cb009f2666db846" gracePeriod=30 Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.606371 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.606626 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-log" containerID="cri-o://8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41" gracePeriod=30 Dec 05 12:33:25 crc kubenswrapper[4711]: I1205 12:33:25.606764 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-metadata" containerID="cri-o://f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661" gracePeriod=30 Dec 05 12:33:26 crc kubenswrapper[4711]: I1205 12:33:26.438095 4711 generic.go:334] "Generic (PLEG): container finished" podID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerID="fdf30377d3fa9a9a19fcac48234fc4b081641fc72387054e066c66344408a330" exitCode=143 Dec 05 12:33:26 crc kubenswrapper[4711]: I1205 12:33:26.438171 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65521134-3d77-4baa-af20-7efc4ab2b48d","Type":"ContainerDied","Data":"fdf30377d3fa9a9a19fcac48234fc4b081641fc72387054e066c66344408a330"} Dec 05 12:33:26 crc kubenswrapper[4711]: I1205 12:33:26.440058 4711 generic.go:334] "Generic (PLEG): container finished" podID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerID="8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41" exitCode=143 Dec 05 12:33:26 crc kubenswrapper[4711]: I1205 12:33:26.440087 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"58bb3765-e8c7-4bd7-9c64-0b5257d5a140","Type":"ContainerDied","Data":"8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41"} Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.036569 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.042965 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.154980 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlsqz\" (UniqueName: \"kubernetes.io/projected/c77ea68b-9a3e-4579-84d5-f13aab92e17c-kube-api-access-mlsqz\") pod \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.155064 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-combined-ca-bundle\") pod \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.155085 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-config-data\") pod \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.155148 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-config-data\") pod \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\" (UID: \"c77ea68b-9a3e-4579-84d5-f13aab92e17c\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.155167 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-logs\") pod \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.155279 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bl96\" (UniqueName: \"kubernetes.io/projected/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-kube-api-access-7bl96\") pod \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.155361 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-combined-ca-bundle\") pod \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.155489 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-nova-metadata-tls-certs\") pod \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\" (UID: \"58bb3765-e8c7-4bd7-9c64-0b5257d5a140\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.156369 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-logs" (OuterVolumeSpecName: "logs") pod "58bb3765-e8c7-4bd7-9c64-0b5257d5a140" (UID: "58bb3765-e8c7-4bd7-9c64-0b5257d5a140"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.161810 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c77ea68b-9a3e-4579-84d5-f13aab92e17c-kube-api-access-mlsqz" (OuterVolumeSpecName: "kube-api-access-mlsqz") pod "c77ea68b-9a3e-4579-84d5-f13aab92e17c" (UID: "c77ea68b-9a3e-4579-84d5-f13aab92e17c"). InnerVolumeSpecName "kube-api-access-mlsqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.161879 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-kube-api-access-7bl96" (OuterVolumeSpecName: "kube-api-access-7bl96") pod "58bb3765-e8c7-4bd7-9c64-0b5257d5a140" (UID: "58bb3765-e8c7-4bd7-9c64-0b5257d5a140"). InnerVolumeSpecName "kube-api-access-7bl96". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.193543 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-config-data" (OuterVolumeSpecName: "config-data") pod "58bb3765-e8c7-4bd7-9c64-0b5257d5a140" (UID: "58bb3765-e8c7-4bd7-9c64-0b5257d5a140"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.195398 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58bb3765-e8c7-4bd7-9c64-0b5257d5a140" (UID: "58bb3765-e8c7-4bd7-9c64-0b5257d5a140"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.195840 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c77ea68b-9a3e-4579-84d5-f13aab92e17c" (UID: "c77ea68b-9a3e-4579-84d5-f13aab92e17c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.200484 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-config-data" (OuterVolumeSpecName: "config-data") pod "c77ea68b-9a3e-4579-84d5-f13aab92e17c" (UID: "c77ea68b-9a3e-4579-84d5-f13aab92e17c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.224751 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "58bb3765-e8c7-4bd7-9c64-0b5257d5a140" (UID: "58bb3765-e8c7-4bd7-9c64-0b5257d5a140"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.258021 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.258054 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.258064 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bl96\" (UniqueName: \"kubernetes.io/projected/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-kube-api-access-7bl96\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.258078 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.258089 4711 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.258104 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlsqz\" (UniqueName: \"kubernetes.io/projected/c77ea68b-9a3e-4579-84d5-f13aab92e17c-kube-api-access-mlsqz\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.258114 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77ea68b-9a3e-4579-84d5-f13aab92e17c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.258124 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58bb3765-e8c7-4bd7-9c64-0b5257d5a140-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.451303 4711 generic.go:334] "Generic (PLEG): container finished" podID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerID="f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661" exitCode=0 Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.451445 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"58bb3765-e8c7-4bd7-9c64-0b5257d5a140","Type":"ContainerDied","Data":"f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661"} Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.451637 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"58bb3765-e8c7-4bd7-9c64-0b5257d5a140","Type":"ContainerDied","Data":"4e499a9c9dfa0019ef585304b7e776974f3506d45b32afa4f511453d70aabc8e"} Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.451657 4711 scope.go:117] "RemoveContainer" containerID="f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.451514 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.455233 4711 generic.go:334] "Generic (PLEG): container finished" podID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerID="35f074132b3996ca1b1f5da794898b77835e65cece467afa3cb009f2666db846" exitCode=0 Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.455374 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65521134-3d77-4baa-af20-7efc4ab2b48d","Type":"ContainerDied","Data":"35f074132b3996ca1b1f5da794898b77835e65cece467afa3cb009f2666db846"} Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.457354 4711 generic.go:334] "Generic (PLEG): container finished" podID="c77ea68b-9a3e-4579-84d5-f13aab92e17c" containerID="b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73" exitCode=0 Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.457481 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c77ea68b-9a3e-4579-84d5-f13aab92e17c","Type":"ContainerDied","Data":"b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73"} Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.457582 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c77ea68b-9a3e-4579-84d5-f13aab92e17c","Type":"ContainerDied","Data":"c8e8676c6ab84dd27115ebd9408546a27af7c67ed1eb066fd43a6b0eee2fa21b"} Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.457611 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.499174 4711 scope.go:117] "RemoveContainer" containerID="8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.522089 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.546943 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.554658 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:27 crc kubenswrapper[4711]: E1205 12:33:27.555442 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4927b347-0a86-4ab8-8a48-345cd36637b4" containerName="nova-manage" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.555618 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4927b347-0a86-4ab8-8a48-345cd36637b4" containerName="nova-manage" Dec 05 12:33:27 crc kubenswrapper[4711]: E1205 12:33:27.556297 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-log" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.556396 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-log" Dec 05 12:33:27 crc kubenswrapper[4711]: E1205 12:33:27.556477 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-metadata" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.556541 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-metadata" Dec 05 12:33:27 crc kubenswrapper[4711]: E1205 12:33:27.556626 4711 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c77ea68b-9a3e-4579-84d5-f13aab92e17c" containerName="nova-scheduler-scheduler" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.556684 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c77ea68b-9a3e-4579-84d5-f13aab92e17c" containerName="nova-scheduler-scheduler" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.556997 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c77ea68b-9a3e-4579-84d5-f13aab92e17c" containerName="nova-scheduler-scheduler" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.557079 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4927b347-0a86-4ab8-8a48-345cd36637b4" containerName="nova-manage" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.557163 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-log" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.557244 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" containerName="nova-metadata-metadata" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.558687 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.561605 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.562316 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.565833 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.570623 4711 scope.go:117] "RemoveContainer" containerID="f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661" Dec 05 12:33:27 crc kubenswrapper[4711]: E1205 12:33:27.574120 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661\": container with ID starting with f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661 not found: ID does not exist" containerID="f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.574166 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661"} err="failed to get container status \"f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661\": rpc error: code = NotFound desc = could not find container \"f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661\": container with ID starting with f0a9f6131e433bfc2215302002b3bde535779918e771851c0909eaaf922e0661 not found: ID does not exist" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.574192 4711 scope.go:117] "RemoveContainer" containerID="8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41" Dec 05 12:33:27 crc kubenswrapper[4711]: E1205 12:33:27.574546 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41\": container with ID starting with 
8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41 not found: ID does not exist" containerID="8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.574596 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41"} err="failed to get container status \"8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41\": rpc error: code = NotFound desc = could not find container \"8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41\": container with ID starting with 8fab5d4c6e303326e280adf608729860c64eb49cc085fe750cca26959a405b41 not found: ID does not exist" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.574620 4711 scope.go:117] "RemoveContainer" containerID="b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.583277 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.594571 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.596043 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.603644 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.605029 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.614636 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.629866 4711 scope.go:117] "RemoveContainer" containerID="b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73" Dec 05 12:33:27 crc kubenswrapper[4711]: E1205 12:33:27.630454 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73\": container with ID starting with b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73 not found: ID does not exist" containerID="b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.630488 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73"} err="failed to get container status \"b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73\": rpc error: code = NotFound desc = could not find container \"b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73\": container with ID starting with b938fb16f36ec0c6248fa46a1c116b8a5a0e163e471082fe85f31b1c73823c73 not found: ID does not exist" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.661883 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.665801 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dpmh\" (UniqueName: \"kubernetes.io/projected/abe3c847-4536-4e00-943c-14ecbb8f9600-kube-api-access-4dpmh\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.665864 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.665904 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc5qq\" (UniqueName: \"kubernetes.io/projected/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-kube-api-access-kc5qq\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.665939 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abe3c847-4536-4e00-943c-14ecbb8f9600-logs\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.666013 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-config-data\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.666032 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.666070 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.666122 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-config-data\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.767663 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65521134-3d77-4baa-af20-7efc4ab2b48d-logs\") pod \"65521134-3d77-4baa-af20-7efc4ab2b48d\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.767792 4711 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwqkg\" (UniqueName: \"kubernetes.io/projected/65521134-3d77-4baa-af20-7efc4ab2b48d-kube-api-access-wwqkg\") pod \"65521134-3d77-4baa-af20-7efc4ab2b48d\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.767873 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-config-data\") pod \"65521134-3d77-4baa-af20-7efc4ab2b48d\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.767921 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-combined-ca-bundle\") pod \"65521134-3d77-4baa-af20-7efc4ab2b48d\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.767962 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-public-tls-certs\") pod \"65521134-3d77-4baa-af20-7efc4ab2b48d\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.768035 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-internal-tls-certs\") pod \"65521134-3d77-4baa-af20-7efc4ab2b48d\" (UID: \"65521134-3d77-4baa-af20-7efc4ab2b48d\") " Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.768326 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.768401 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-config-data\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.768474 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dpmh\" (UniqueName: \"kubernetes.io/projected/abe3c847-4536-4e00-943c-14ecbb8f9600-kube-api-access-4dpmh\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.768981 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.768642 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65521134-3d77-4baa-af20-7efc4ab2b48d-logs" (OuterVolumeSpecName: "logs") pod "65521134-3d77-4baa-af20-7efc4ab2b48d" (UID: "65521134-3d77-4baa-af20-7efc4ab2b48d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.769018 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc5qq\" (UniqueName: \"kubernetes.io/projected/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-kube-api-access-kc5qq\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.769052 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abe3c847-4536-4e00-943c-14ecbb8f9600-logs\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.769147 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-config-data\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.769168 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.769233 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65521134-3d77-4baa-af20-7efc4ab2b48d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.769899 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abe3c847-4536-4e00-943c-14ecbb8f9600-logs\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.773491 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.775078 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65521134-3d77-4baa-af20-7efc4ab2b48d-kube-api-access-wwqkg" (OuterVolumeSpecName: "kube-api-access-wwqkg") pod "65521134-3d77-4baa-af20-7efc4ab2b48d" (UID: "65521134-3d77-4baa-af20-7efc4ab2b48d"). InnerVolumeSpecName "kube-api-access-wwqkg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.785279 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.785286 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-config-data\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.785333 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-config-data\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.794187 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc5qq\" (UniqueName: \"kubernetes.io/projected/e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9-kube-api-access-kc5qq\") pod \"nova-scheduler-0\" (UID: \"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9\") " pod="openstack/nova-scheduler-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.794724 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dpmh\" (UniqueName: \"kubernetes.io/projected/abe3c847-4536-4e00-943c-14ecbb8f9600-kube-api-access-4dpmh\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.796709 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/abe3c847-4536-4e00-943c-14ecbb8f9600-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"abe3c847-4536-4e00-943c-14ecbb8f9600\") " pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.814516 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65521134-3d77-4baa-af20-7efc4ab2b48d" (UID: "65521134-3d77-4baa-af20-7efc4ab2b48d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.826360 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-config-data" (OuterVolumeSpecName: "config-data") pod "65521134-3d77-4baa-af20-7efc4ab2b48d" (UID: "65521134-3d77-4baa-af20-7efc4ab2b48d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.845595 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "65521134-3d77-4baa-af20-7efc4ab2b48d" (UID: "65521134-3d77-4baa-af20-7efc4ab2b48d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.848357 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "65521134-3d77-4baa-af20-7efc4ab2b48d" (UID: "65521134-3d77-4baa-af20-7efc4ab2b48d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.872290 4711 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.872334 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwqkg\" (UniqueName: \"kubernetes.io/projected/65521134-3d77-4baa-af20-7efc4ab2b48d-kube-api-access-wwqkg\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.872347 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.872355 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.872364 4711 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65521134-3d77-4baa-af20-7efc4ab2b48d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.878232 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:33:27 crc kubenswrapper[4711]: I1205 12:33:27.917057 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.361834 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.410091 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:33:28 crc kubenswrapper[4711]: W1205 12:33:28.411631 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode43a6d5b_867b_4aee_b8bd_37f5ef5d92b9.slice/crio-484d9e7678b9d1892ae076c7dc584c49aa62ed6153d25751e081e0ff0e6d1e0a WatchSource:0}: Error finding container 484d9e7678b9d1892ae076c7dc584c49aa62ed6153d25751e081e0ff0e6d1e0a: Status 404 returned error can't find the container with id 484d9e7678b9d1892ae076c7dc584c49aa62ed6153d25751e081e0ff0e6d1e0a Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.473714 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9","Type":"ContainerStarted","Data":"484d9e7678b9d1892ae076c7dc584c49aa62ed6153d25751e081e0ff0e6d1e0a"} Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.476615 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65521134-3d77-4baa-af20-7efc4ab2b48d","Type":"ContainerDied","Data":"b4abc271962ceb3a073e8f4c12d517bfad3d85e15638c8c62e88ffe680754263"} Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.476672 4711 scope.go:117] "RemoveContainer" containerID="35f074132b3996ca1b1f5da794898b77835e65cece467afa3cb009f2666db846" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.476670 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.478064 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abe3c847-4536-4e00-943c-14ecbb8f9600","Type":"ContainerStarted","Data":"742c4ae660dfb0fc5d0355fd9400c719411a91b4651af7aaeeae14df2b12d610"} Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.522975 4711 scope.go:117] "RemoveContainer" containerID="fdf30377d3fa9a9a19fcac48234fc4b081641fc72387054e066c66344408a330" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.533347 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.552535 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.564521 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:28 crc kubenswrapper[4711]: E1205 12:33:28.565148 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-api" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.565243 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-api" Dec 05 12:33:28 crc kubenswrapper[4711]: E1205 12:33:28.565325 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-log" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.565401 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-log" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.565724 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-api" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.565841 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" containerName="nova-api-log" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.567322 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.571791 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.572126 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.572411 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.597901 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.687415 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvhwj\" (UniqueName: \"kubernetes.io/projected/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-kube-api-access-tvhwj\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.687655 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.687716 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-logs\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.687748 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.687849 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-public-tls-certs\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.688007 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-config-data\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.699399 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58bb3765-e8c7-4bd7-9c64-0b5257d5a140" path="/var/lib/kubelet/pods/58bb3765-e8c7-4bd7-9c64-0b5257d5a140/volumes" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.700182 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65521134-3d77-4baa-af20-7efc4ab2b48d" path="/var/lib/kubelet/pods/65521134-3d77-4baa-af20-7efc4ab2b48d/volumes" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.700902 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c77ea68b-9a3e-4579-84d5-f13aab92e17c" path="/var/lib/kubelet/pods/c77ea68b-9a3e-4579-84d5-f13aab92e17c/volumes" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.789917 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-config-data\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.789967 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvhwj\" (UniqueName: \"kubernetes.io/projected/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-kube-api-access-tvhwj\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.790063 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.790096 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-logs\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.790115 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.790161 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-public-tls-certs\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.791114 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-logs\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.793996 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-config-data\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.794836 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.795174 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-public-tls-certs\") pod \"nova-api-0\" (UID: 
\"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.795557 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.805415 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvhwj\" (UniqueName: \"kubernetes.io/projected/da6eb04a-87a2-4470-b96a-97e7bbc52a4a-kube-api-access-tvhwj\") pod \"nova-api-0\" (UID: \"da6eb04a-87a2-4470-b96a-97e7bbc52a4a\") " pod="openstack/nova-api-0" Dec 05 12:33:28 crc kubenswrapper[4711]: I1205 12:33:28.897818 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:33:29 crc kubenswrapper[4711]: I1205 12:33:29.336149 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:33:29 crc kubenswrapper[4711]: W1205 12:33:29.343805 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda6eb04a_87a2_4470_b96a_97e7bbc52a4a.slice/crio-92c1c624c2360e200ca6a28f02c8bddc619fd27977474c3b0510bde0693bf252 WatchSource:0}: Error finding container 92c1c624c2360e200ca6a28f02c8bddc619fd27977474c3b0510bde0693bf252: Status 404 returned error can't find the container with id 92c1c624c2360e200ca6a28f02c8bddc619fd27977474c3b0510bde0693bf252 Dec 05 12:33:29 crc kubenswrapper[4711]: I1205 12:33:29.494658 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da6eb04a-87a2-4470-b96a-97e7bbc52a4a","Type":"ContainerStarted","Data":"92c1c624c2360e200ca6a28f02c8bddc619fd27977474c3b0510bde0693bf252"} Dec 05 12:33:29 crc kubenswrapper[4711]: I1205 12:33:29.500626 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abe3c847-4536-4e00-943c-14ecbb8f9600","Type":"ContainerStarted","Data":"f65f4554a762597a7cfc569400e406529fca54e6ad39023c9fb2fb5a0e2f6fbf"} Dec 05 12:33:29 crc kubenswrapper[4711]: I1205 12:33:29.500692 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abe3c847-4536-4e00-943c-14ecbb8f9600","Type":"ContainerStarted","Data":"3fe858dd73f6403cd920a04442418bb953d5b42f5cb9a7096ae4d33d57215c02"} Dec 05 12:33:29 crc kubenswrapper[4711]: I1205 12:33:29.505210 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9","Type":"ContainerStarted","Data":"124fd2f23a5bab546708710a6524c426cb1d0a2be69e3d8ae6332ae7e2511439"} Dec 05 12:33:29 crc kubenswrapper[4711]: I1205 12:33:29.527481 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.5274432620000002 podStartE2EDuration="2.527443262s" podCreationTimestamp="2025-12-05 12:33:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:29.516586735 +0000 UTC m=+1455.100909075" watchObservedRunningTime="2025-12-05 12:33:29.527443262 +0000 UTC m=+1455.111765592" Dec 05 12:33:29 crc kubenswrapper[4711]: I1205 12:33:29.538206 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-scheduler-0" podStartSLOduration=2.538176576 podStartE2EDuration="2.538176576s" podCreationTimestamp="2025-12-05 12:33:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:29.532197528 +0000 UTC m=+1455.116519858" watchObservedRunningTime="2025-12-05 12:33:29.538176576 +0000 UTC m=+1455.122498906" Dec 05 12:33:30 crc kubenswrapper[4711]: I1205 12:33:30.516184 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da6eb04a-87a2-4470-b96a-97e7bbc52a4a","Type":"ContainerStarted","Data":"d0e523e6635bc86c78fbeeeb27c867c718edcc1f8af1f839693ef22c30f2ba17"} Dec 05 12:33:30 crc kubenswrapper[4711]: I1205 12:33:30.516544 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da6eb04a-87a2-4470-b96a-97e7bbc52a4a","Type":"ContainerStarted","Data":"b8227f56b06d084436b03b6821b58e0a3ede5fb6b380fc21d664e30b6176b0c5"} Dec 05 12:33:30 crc kubenswrapper[4711]: I1205 12:33:30.539920 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.539901697 podStartE2EDuration="2.539901697s" podCreationTimestamp="2025-12-05 12:33:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:33:30.53554396 +0000 UTC m=+1456.119866310" watchObservedRunningTime="2025-12-05 12:33:30.539901697 +0000 UTC m=+1456.124224027" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.053646 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wkptm"] Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.055823 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.066678 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wkptm"] Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.155257 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-utilities\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.155902 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-catalog-content\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.156059 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9p87\" (UniqueName: \"kubernetes.io/projected/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-kube-api-access-n9p87\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.258103 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9p87\" (UniqueName: \"kubernetes.io/projected/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-kube-api-access-n9p87\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.258184 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-utilities\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.258309 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-catalog-content\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.259062 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-catalog-content\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.259288 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-utilities\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.282675 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-n9p87\" (UniqueName: \"kubernetes.io/projected/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-kube-api-access-n9p87\") pod \"community-operators-wkptm\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.390174 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.878768 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.879531 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.895278 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wkptm"] Dec 05 12:33:32 crc kubenswrapper[4711]: W1205 12:33:32.909356 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6821e63_e515_4ffc_8e62_cb7b2170e7f6.slice/crio-6267dad76a985ce04ca5b4cbdcf062540fb4001d1222beed8ae064513b782d65 WatchSource:0}: Error finding container 6267dad76a985ce04ca5b4cbdcf062540fb4001d1222beed8ae064513b782d65: Status 404 returned error can't find the container with id 6267dad76a985ce04ca5b4cbdcf062540fb4001d1222beed8ae064513b782d65 Dec 05 12:33:32 crc kubenswrapper[4711]: I1205 12:33:32.918190 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 12:33:33 crc kubenswrapper[4711]: I1205 12:33:33.546605 4711 generic.go:334] "Generic (PLEG): container finished" podID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerID="593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a" exitCode=0 Dec 05 12:33:33 crc kubenswrapper[4711]: I1205 12:33:33.546649 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkptm" event={"ID":"d6821e63-e515-4ffc-8e62-cb7b2170e7f6","Type":"ContainerDied","Data":"593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a"} Dec 05 12:33:33 crc kubenswrapper[4711]: I1205 12:33:33.548561 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkptm" event={"ID":"d6821e63-e515-4ffc-8e62-cb7b2170e7f6","Type":"ContainerStarted","Data":"6267dad76a985ce04ca5b4cbdcf062540fb4001d1222beed8ae064513b782d65"} Dec 05 12:33:34 crc kubenswrapper[4711]: I1205 12:33:34.560978 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkptm" event={"ID":"d6821e63-e515-4ffc-8e62-cb7b2170e7f6","Type":"ContainerStarted","Data":"3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813"} Dec 05 12:33:35 crc kubenswrapper[4711]: I1205 12:33:35.570727 4711 generic.go:334] "Generic (PLEG): container finished" podID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerID="3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813" exitCode=0 Dec 05 12:33:35 crc kubenswrapper[4711]: I1205 12:33:35.570763 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkptm" event={"ID":"d6821e63-e515-4ffc-8e62-cb7b2170e7f6","Type":"ContainerDied","Data":"3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813"} Dec 05 12:33:36 crc kubenswrapper[4711]: I1205 
12:33:36.581164 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkptm" event={"ID":"d6821e63-e515-4ffc-8e62-cb7b2170e7f6","Type":"ContainerStarted","Data":"0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c"} Dec 05 12:33:36 crc kubenswrapper[4711]: I1205 12:33:36.600642 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wkptm" podStartSLOduration=2.19734631 podStartE2EDuration="4.600626135s" podCreationTimestamp="2025-12-05 12:33:32 +0000 UTC" firstStartedPulling="2025-12-05 12:33:33.549447959 +0000 UTC m=+1459.133770299" lastFinishedPulling="2025-12-05 12:33:35.952727754 +0000 UTC m=+1461.537050124" observedRunningTime="2025-12-05 12:33:36.599643741 +0000 UTC m=+1462.183966071" watchObservedRunningTime="2025-12-05 12:33:36.600626135 +0000 UTC m=+1462.184948465" Dec 05 12:33:37 crc kubenswrapper[4711]: I1205 12:33:37.879157 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 12:33:37 crc kubenswrapper[4711]: I1205 12:33:37.879583 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 12:33:37 crc kubenswrapper[4711]: I1205 12:33:37.918044 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 12:33:37 crc kubenswrapper[4711]: I1205 12:33:37.944976 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 12:33:38 crc kubenswrapper[4711]: I1205 12:33:38.639063 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 12:33:38 crc kubenswrapper[4711]: I1205 12:33:38.893711 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="abe3c847-4536-4e00-943c-14ecbb8f9600" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:33:38 crc kubenswrapper[4711]: I1205 12:33:38.893672 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="abe3c847-4536-4e00-943c-14ecbb8f9600" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:33:38 crc kubenswrapper[4711]: I1205 12:33:38.898694 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:33:38 crc kubenswrapper[4711]: I1205 12:33:38.898761 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:33:39 crc kubenswrapper[4711]: I1205 12:33:39.910723 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="da6eb04a-87a2-4470-b96a-97e7bbc52a4a" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.223:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:33:39 crc kubenswrapper[4711]: I1205 12:33:39.910760 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="da6eb04a-87a2-4470-b96a-97e7bbc52a4a" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.223:8774/\": net/http: request canceled (Client.Timeout exceeded while 
awaiting headers)" Dec 05 12:33:42 crc kubenswrapper[4711]: I1205 12:33:42.390996 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:42 crc kubenswrapper[4711]: I1205 12:33:42.394659 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:42 crc kubenswrapper[4711]: I1205 12:33:42.443007 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:42 crc kubenswrapper[4711]: I1205 12:33:42.695677 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:42 crc kubenswrapper[4711]: I1205 12:33:42.742680 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wkptm"] Dec 05 12:33:44 crc kubenswrapper[4711]: I1205 12:33:44.656075 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wkptm" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerName="registry-server" containerID="cri-o://0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c" gracePeriod=2 Dec 05 12:33:44 crc kubenswrapper[4711]: I1205 12:33:44.699331 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.158286 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.242410 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-catalog-content\") pod \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.242689 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9p87\" (UniqueName: \"kubernetes.io/projected/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-kube-api-access-n9p87\") pod \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.242728 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-utilities\") pod \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\" (UID: \"d6821e63-e515-4ffc-8e62-cb7b2170e7f6\") " Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.243470 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-utilities" (OuterVolumeSpecName: "utilities") pod "d6821e63-e515-4ffc-8e62-cb7b2170e7f6" (UID: "d6821e63-e515-4ffc-8e62-cb7b2170e7f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.248997 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-kube-api-access-n9p87" (OuterVolumeSpecName: "kube-api-access-n9p87") pod "d6821e63-e515-4ffc-8e62-cb7b2170e7f6" (UID: "d6821e63-e515-4ffc-8e62-cb7b2170e7f6"). 
InnerVolumeSpecName "kube-api-access-n9p87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.298316 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d6821e63-e515-4ffc-8e62-cb7b2170e7f6" (UID: "d6821e63-e515-4ffc-8e62-cb7b2170e7f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.345484 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9p87\" (UniqueName: \"kubernetes.io/projected/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-kube-api-access-n9p87\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.345530 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.345544 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6821e63-e515-4ffc-8e62-cb7b2170e7f6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.665981 4711 generic.go:334] "Generic (PLEG): container finished" podID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerID="0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c" exitCode=0 Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.666028 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkptm" event={"ID":"d6821e63-e515-4ffc-8e62-cb7b2170e7f6","Type":"ContainerDied","Data":"0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c"} Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.666053 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkptm" event={"ID":"d6821e63-e515-4ffc-8e62-cb7b2170e7f6","Type":"ContainerDied","Data":"6267dad76a985ce04ca5b4cbdcf062540fb4001d1222beed8ae064513b782d65"} Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.666070 4711 scope.go:117] "RemoveContainer" containerID="0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.666190 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wkptm" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.690337 4711 scope.go:117] "RemoveContainer" containerID="3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.716710 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wkptm"] Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.754540 4711 scope.go:117] "RemoveContainer" containerID="593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.754644 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wkptm"] Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.776119 4711 scope.go:117] "RemoveContainer" containerID="0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c" Dec 05 12:33:45 crc kubenswrapper[4711]: E1205 12:33:45.776607 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c\": container with ID starting with 0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c not found: ID does not exist" containerID="0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.776708 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c"} err="failed to get container status \"0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c\": rpc error: code = NotFound desc = could not find container \"0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c\": container with ID starting with 0538d273108ce852f357ad03344f2abe05163a81c4f21bf5e9468376015d446c not found: ID does not exist" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.776786 4711 scope.go:117] "RemoveContainer" containerID="3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813" Dec 05 12:33:45 crc kubenswrapper[4711]: E1205 12:33:45.777201 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813\": container with ID starting with 3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813 not found: ID does not exist" containerID="3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.777229 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813"} err="failed to get container status \"3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813\": rpc error: code = NotFound desc = could not find container \"3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813\": container with ID starting with 3368050b27bdea43fe818c4396aa44270b73d43122e917bcd3b8ea1f794ce813 not found: ID does not exist" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.777246 4711 scope.go:117] "RemoveContainer" containerID="593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a" Dec 05 12:33:45 crc kubenswrapper[4711]: E1205 12:33:45.777495 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a\": container with ID starting with 593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a not found: ID does not exist" containerID="593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a" Dec 05 12:33:45 crc kubenswrapper[4711]: I1205 12:33:45.777521 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a"} err="failed to get container status \"593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a\": rpc error: code = NotFound desc = could not find container \"593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a\": container with ID starting with 593034e0ed24b1cd63ae726c616f340028990ff26f66f6edbc8e4600d0b3c33a not found: ID does not exist" Dec 05 12:33:46 crc kubenswrapper[4711]: I1205 12:33:46.697241 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" path="/var/lib/kubelet/pods/d6821e63-e515-4ffc-8e62-cb7b2170e7f6/volumes" Dec 05 12:33:47 crc kubenswrapper[4711]: I1205 12:33:47.884295 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 12:33:47 crc kubenswrapper[4711]: I1205 12:33:47.884720 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 12:33:47 crc kubenswrapper[4711]: I1205 12:33:47.892422 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 12:33:47 crc kubenswrapper[4711]: I1205 12:33:47.893983 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 12:33:48 crc kubenswrapper[4711]: I1205 12:33:48.301148 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:33:48 crc kubenswrapper[4711]: I1205 12:33:48.301220 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:33:48 crc kubenswrapper[4711]: I1205 12:33:48.906338 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 12:33:48 crc kubenswrapper[4711]: I1205 12:33:48.906985 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 12:33:48 crc kubenswrapper[4711]: I1205 12:33:48.911714 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 12:33:48 crc kubenswrapper[4711]: I1205 12:33:48.917244 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 12:33:49 crc kubenswrapper[4711]: I1205 12:33:49.708784 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 12:33:49 crc kubenswrapper[4711]: I1205 12:33:49.718419 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/nova-api-0" Dec 05 12:33:57 crc kubenswrapper[4711]: I1205 12:33:57.828690 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:33:58 crc kubenswrapper[4711]: I1205 12:33:58.975399 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:34:01 crc kubenswrapper[4711]: I1205 12:34:01.614288 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2" containerName="rabbitmq" containerID="cri-o://53b8334dfadeb2202608549200bcd680b7af0b50d02278712b930c5b5275f6f9" gracePeriod=604797 Dec 05 12:34:02 crc kubenswrapper[4711]: I1205 12:34:02.539803 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="bff43887-0cb0-4da0-a16f-6264877c473e" containerName="rabbitmq" containerID="cri-o://5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849" gracePeriod=604797 Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.046490 4711 generic.go:334] "Generic (PLEG): container finished" podID="c5ef8db4-5a97-483c-a168-bcf6368849a2" containerID="53b8334dfadeb2202608549200bcd680b7af0b50d02278712b930c5b5275f6f9" exitCode=0 Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.046550 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5ef8db4-5a97-483c-a168-bcf6368849a2","Type":"ContainerDied","Data":"53b8334dfadeb2202608549200bcd680b7af0b50d02278712b930c5b5275f6f9"} Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.421547 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.447369 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-confd\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.447549 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-server-conf\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.447611 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-plugins\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.447632 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-erlang-cookie\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.447715 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 
12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.447905 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-tls\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.447984 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-config-data\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.448048 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c5ef8db4-5a97-483c-a168-bcf6368849a2-erlang-cookie-secret\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.448077 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c5ef8db4-5a97-483c-a168-bcf6368849a2-pod-info\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.448126 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czcns\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-kube-api-access-czcns\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.448142 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-plugins-conf\") pod \"c5ef8db4-5a97-483c-a168-bcf6368849a2\" (UID: \"c5ef8db4-5a97-483c-a168-bcf6368849a2\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.449028 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.449422 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.450197 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.462661 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-kube-api-access-czcns" (OuterVolumeSpecName: "kube-api-access-czcns") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "kube-api-access-czcns". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.462729 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5ef8db4-5a97-483c-a168-bcf6368849a2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.487195 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.487762 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c5ef8db4-5a97-483c-a168-bcf6368849a2-pod-info" (OuterVolumeSpecName: "pod-info") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.501158 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.538197 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-config-data" (OuterVolumeSpecName: "config-data") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.564548 4711 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.566698 4711 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.566754 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.566771 4711 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.566784 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.566793 4711 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c5ef8db4-5a97-483c-a168-bcf6368849a2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.566802 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czcns\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-kube-api-access-czcns\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.566810 4711 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.566819 4711 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c5ef8db4-5a97-483c-a168-bcf6368849a2-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.587941 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-server-conf" (OuterVolumeSpecName: "server-conf") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.636900 4711 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.661920 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c5ef8db4-5a97-483c-a168-bcf6368849a2" (UID: "c5ef8db4-5a97-483c-a168-bcf6368849a2"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.668638 4711 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.668742 4711 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c5ef8db4-5a97-483c-a168-bcf6368849a2-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.668799 4711 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c5ef8db4-5a97-483c-a168-bcf6368849a2-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.687152 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770068 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bff43887-0cb0-4da0-a16f-6264877c473e-pod-info\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770107 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-plugins\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770157 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770182 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-config-data\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770240 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-tls\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770286 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz9lp\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-kube-api-access-zz9lp\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770301 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-plugins-conf\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770336 4711 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-confd\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770368 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-erlang-cookie\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770532 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-server-conf\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.770617 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bff43887-0cb0-4da0-a16f-6264877c473e-erlang-cookie-secret\") pod \"bff43887-0cb0-4da0-a16f-6264877c473e\" (UID: \"bff43887-0cb0-4da0-a16f-6264877c473e\") " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.775264 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.776446 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.778686 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.784020 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.784123 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff43887-0cb0-4da0-a16f-6264877c473e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.785147 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-kube-api-access-zz9lp" (OuterVolumeSpecName: "kube-api-access-zz9lp") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "kube-api-access-zz9lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.785210 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/bff43887-0cb0-4da0-a16f-6264877c473e-pod-info" (OuterVolumeSpecName: "pod-info") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.785763 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.828175 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-config-data" (OuterVolumeSpecName: "config-data") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.840292 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-server-conf" (OuterVolumeSpecName: "server-conf") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.872988 4711 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bff43887-0cb0-4da0-a16f-6264877c473e-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873024 4711 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873053 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873064 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873073 4711 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873083 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz9lp\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-kube-api-access-zz9lp\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873091 4711 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873102 4711 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873110 4711 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bff43887-0cb0-4da0-a16f-6264877c473e-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.873145 4711 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bff43887-0cb0-4da0-a16f-6264877c473e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.901490 4711 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.927598 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "bff43887-0cb0-4da0-a16f-6264877c473e" (UID: "bff43887-0cb0-4da0-a16f-6264877c473e"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.975469 4711 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:04 crc kubenswrapper[4711]: I1205 12:34:04.975516 4711 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bff43887-0cb0-4da0-a16f-6264877c473e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.057689 4711 generic.go:334] "Generic (PLEG): container finished" podID="bff43887-0cb0-4da0-a16f-6264877c473e" containerID="5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849" exitCode=0 Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.057736 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.057751 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bff43887-0cb0-4da0-a16f-6264877c473e","Type":"ContainerDied","Data":"5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849"} Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.057813 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bff43887-0cb0-4da0-a16f-6264877c473e","Type":"ContainerDied","Data":"510ca734e87fa52d76b2e6cefcc4ffb4a033bcb1aedee970c8fe0dd4422caede"} Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.057830 4711 scope.go:117] "RemoveContainer" containerID="5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.061059 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5ef8db4-5a97-483c-a168-bcf6368849a2","Type":"ContainerDied","Data":"fca05599870c7dc542b1e55be001a2eae81916699792d388099d7deb20454cb3"} Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.061156 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.086242 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.096046 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.099807 4711 scope.go:117] "RemoveContainer" containerID="b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.107613 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.118416 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.129984 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.130543 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerName="extract-utilities" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130563 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerName="extract-utilities" Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.130577 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerName="extract-content" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130584 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerName="extract-content" Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.130597 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerName="registry-server" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130605 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerName="registry-server" Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.130623 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2" containerName="rabbitmq" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130630 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2" containerName="rabbitmq" Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.130642 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff43887-0cb0-4da0-a16f-6264877c473e" containerName="rabbitmq" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130649 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff43887-0cb0-4da0-a16f-6264877c473e" containerName="rabbitmq" Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.130659 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff43887-0cb0-4da0-a16f-6264877c473e" containerName="setup-container" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130666 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff43887-0cb0-4da0-a16f-6264877c473e" containerName="setup-container" Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.130698 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2" 
containerName="setup-container" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130705 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2" containerName="setup-container" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130924 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6821e63-e515-4ffc-8e62-cb7b2170e7f6" containerName="registry-server" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130945 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="bff43887-0cb0-4da0-a16f-6264877c473e" containerName="rabbitmq" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.130978 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2" containerName="rabbitmq" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.132269 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.139199 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.139517 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.139713 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.139847 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.141106 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.141164 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.141232 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-g8wzk" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.148165 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.152283 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.156051 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-w7khg" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.156275 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.156417 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.156526 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.156711 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.157654 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.157804 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.158879 4711 scope.go:117] "RemoveContainer" containerID="5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849" Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.159339 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849\": container with ID starting with 5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849 not found: ID does not exist" containerID="5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.159374 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849"} err="failed to get container status \"5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849\": rpc error: code = NotFound desc = could not find container \"5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849\": container with ID starting with 5c6d74aafc0b4d847fc993ab63416c8919213ded9ed982c0487dbc9853980849 not found: ID does not exist" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.159433 4711 scope.go:117] "RemoveContainer" containerID="b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec" Dec 05 12:34:05 crc kubenswrapper[4711]: E1205 12:34:05.162980 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec\": container with ID starting with b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec not found: ID does not exist" containerID="b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.163027 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec"} err="failed to get container status \"b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec\": rpc error: code = NotFound desc = could not find 
container \"b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec\": container with ID starting with b5034b39d3d562d37531cdaac82219a8860de56bd371fd38d49b3c123cb0f3ec not found: ID does not exist" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.163053 4711 scope.go:117] "RemoveContainer" containerID="53b8334dfadeb2202608549200bcd680b7af0b50d02278712b930c5b5275f6f9" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.172097 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183038 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183081 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183131 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183173 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183239 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183287 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183311 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183344 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-config-data\") pod \"rabbitmq-server-0\" (UID: 
\"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183365 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183402 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.183437 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5ljt\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-kube-api-access-n5ljt\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.188357 4711 scope.go:117] "RemoveContainer" containerID="47f67c484cab171bc640a44e92ecd16dea282d80e2d48d886adc47eb22d914fe" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.191803 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285578 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285652 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285685 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-config-data\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285705 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285726 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285793 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285827 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5ljt\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-kube-api-access-n5ljt\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285844 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285895 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285921 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285964 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.285993 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286013 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1719e95a-bfa1-4302-b7f5-08acd0d41d93-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286075 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzjsq\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-kube-api-access-nzjsq\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286101 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286144 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286260 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286311 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286337 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1719e95a-bfa1-4302-b7f5-08acd0d41d93-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286380 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286436 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286463 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286607 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286810 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-config-data\") pod \"rabbitmq-server-0\" (UID: 
\"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.286967 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.287007 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.287599 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.288046 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.291965 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.292029 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.294773 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.296864 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.307486 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5ljt\" (UniqueName: \"kubernetes.io/projected/314912fb-0c68-4fc7-9472-f84b1f0ab8cd-kube-api-access-n5ljt\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.333728 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"314912fb-0c68-4fc7-9472-f84b1f0ab8cd\") " pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.387632 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.387688 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.387759 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.387777 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1719e95a-bfa1-4302-b7f5-08acd0d41d93-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.387804 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzjsq\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-kube-api-access-nzjsq\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.387838 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.387847 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.388658 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.389435 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 
05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.387862 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.389627 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1719e95a-bfa1-4302-b7f5-08acd0d41d93-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.389702 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.389758 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.389825 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.390798 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1719e95a-bfa1-4302-b7f5-08acd0d41d93-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.391093 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.391792 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.392427 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1719e95a-bfa1-4302-b7f5-08acd0d41d93-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.397436 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" 
(UniqueName: \"kubernetes.io/downward-api/1719e95a-bfa1-4302-b7f5-08acd0d41d93-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.397716 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.406040 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.415144 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzjsq\" (UniqueName: \"kubernetes.io/projected/1719e95a-bfa1-4302-b7f5-08acd0d41d93-kube-api-access-nzjsq\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.437628 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1719e95a-bfa1-4302-b7f5-08acd0d41d93\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.455810 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.484688 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.933070 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hznc8"] Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.936144 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:05 crc kubenswrapper[4711]: I1205 12:34:05.959293 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hznc8"] Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.022522 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.076606 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1719e95a-bfa1-4302-b7f5-08acd0d41d93","Type":"ContainerStarted","Data":"4511cee43eacc5e5b04eb5ba1dbc430f348f2767ae78738de1d7a3f73e9251dc"} Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.109046 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-utilities\") pod \"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.109426 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-catalog-content\") pod \"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.109511 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8v6j\" (UniqueName: \"kubernetes.io/projected/40188c7a-cea7-4049-808f-aa4a4016bcb9-kube-api-access-h8v6j\") pod \"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.129409 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.212819 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8v6j\" (UniqueName: \"kubernetes.io/projected/40188c7a-cea7-4049-808f-aa4a4016bcb9-kube-api-access-h8v6j\") pod \"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.213006 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-utilities\") pod \"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.213032 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-catalog-content\") pod \"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.213615 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-catalog-content\") pod 
\"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.213761 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-utilities\") pod \"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.228437 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8v6j\" (UniqueName: \"kubernetes.io/projected/40188c7a-cea7-4049-808f-aa4a4016bcb9-kube-api-access-h8v6j\") pod \"redhat-marketplace-hznc8\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.274208 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.695018 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bff43887-0cb0-4da0-a16f-6264877c473e" path="/var/lib/kubelet/pods/bff43887-0cb0-4da0-a16f-6264877c473e/volumes" Dec 05 12:34:06 crc kubenswrapper[4711]: W1205 12:34:06.753772 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40188c7a_cea7_4049_808f_aa4a4016bcb9.slice/crio-e295b05e8bc96ab8df0715966530456b4ebfe6af3d71a9fac6508b840a1ec89c WatchSource:0}: Error finding container e295b05e8bc96ab8df0715966530456b4ebfe6af3d71a9fac6508b840a1ec89c: Status 404 returned error can't find the container with id e295b05e8bc96ab8df0715966530456b4ebfe6af3d71a9fac6508b840a1ec89c Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.757379 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5ef8db4-5a97-483c-a168-bcf6368849a2" path="/var/lib/kubelet/pods/c5ef8db4-5a97-483c-a168-bcf6368849a2/volumes" Dec 05 12:34:06 crc kubenswrapper[4711]: I1205 12:34:06.758178 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hznc8"] Dec 05 12:34:07 crc kubenswrapper[4711]: I1205 12:34:07.089266 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hznc8" event={"ID":"40188c7a-cea7-4049-808f-aa4a4016bcb9","Type":"ContainerStarted","Data":"e295b05e8bc96ab8df0715966530456b4ebfe6af3d71a9fac6508b840a1ec89c"} Dec 05 12:34:07 crc kubenswrapper[4711]: I1205 12:34:07.091487 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"314912fb-0c68-4fc7-9472-f84b1f0ab8cd","Type":"ContainerStarted","Data":"fa6dc8fb2f19d6ce1e57996c2d3fc977f81bc6765c5b92cc7ee778437cfc6ebe"} Dec 05 12:34:08 crc kubenswrapper[4711]: I1205 12:34:08.105139 4711 generic.go:334] "Generic (PLEG): container finished" podID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerID="5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca" exitCode=0 Dec 05 12:34:08 crc kubenswrapper[4711]: I1205 12:34:08.105222 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hznc8" event={"ID":"40188c7a-cea7-4049-808f-aa4a4016bcb9","Type":"ContainerDied","Data":"5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca"} Dec 05 12:34:08 crc kubenswrapper[4711]: 
I1205 12:34:08.108337 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"314912fb-0c68-4fc7-9472-f84b1f0ab8cd","Type":"ContainerStarted","Data":"e2366c8d44311d1dab56e64b069ff42d4f6a07de38ad62b44959e017f8d5108c"} Dec 05 12:34:08 crc kubenswrapper[4711]: I1205 12:34:08.111345 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1719e95a-bfa1-4302-b7f5-08acd0d41d93","Type":"ContainerStarted","Data":"ebd4d340f201def0519f94d5d27b283ff287acf7528e01b69476c48d01bf5cec"} Dec 05 12:34:09 crc kubenswrapper[4711]: I1205 12:34:09.130641 4711 generic.go:334] "Generic (PLEG): container finished" podID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerID="0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509" exitCode=0 Dec 05 12:34:09 crc kubenswrapper[4711]: I1205 12:34:09.130756 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hznc8" event={"ID":"40188c7a-cea7-4049-808f-aa4a4016bcb9","Type":"ContainerDied","Data":"0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509"} Dec 05 12:34:10 crc kubenswrapper[4711]: I1205 12:34:10.143640 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hznc8" event={"ID":"40188c7a-cea7-4049-808f-aa4a4016bcb9","Type":"ContainerStarted","Data":"15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6"} Dec 05 12:34:10 crc kubenswrapper[4711]: I1205 12:34:10.170762 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hznc8" podStartSLOduration=3.764108146 podStartE2EDuration="5.170736454s" podCreationTimestamp="2025-12-05 12:34:05 +0000 UTC" firstStartedPulling="2025-12-05 12:34:08.108055452 +0000 UTC m=+1493.692377782" lastFinishedPulling="2025-12-05 12:34:09.51468376 +0000 UTC m=+1495.099006090" observedRunningTime="2025-12-05 12:34:10.162876258 +0000 UTC m=+1495.747198588" watchObservedRunningTime="2025-12-05 12:34:10.170736454 +0000 UTC m=+1495.755058784" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.037827 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d88795c5c-tx6t4"] Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.042131 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.044913 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.055074 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d88795c5c-tx6t4"] Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.134118 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-swift-storage-0\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.134249 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7ggc\" (UniqueName: \"kubernetes.io/projected/b4d3c8b7-0400-4f17-a232-fdbe0902542f-kube-api-access-r7ggc\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.134288 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-svc\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.134318 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-nb\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.134449 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-sb\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.134495 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.134523 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-config\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.236452 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7ggc\" (UniqueName: \"kubernetes.io/projected/b4d3c8b7-0400-4f17-a232-fdbe0902542f-kube-api-access-r7ggc\") pod 
\"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.236524 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-svc\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.236556 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-nb\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.236658 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-sb\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.236699 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.236726 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-config\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.236770 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-swift-storage-0\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.237882 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-sb\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.237944 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-swift-storage-0\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.237952 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-svc\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " 
pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.238137 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-nb\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.238412 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.238849 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-config\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.262430 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7ggc\" (UniqueName: \"kubernetes.io/projected/b4d3c8b7-0400-4f17-a232-fdbe0902542f-kube-api-access-r7ggc\") pod \"dnsmasq-dns-5d88795c5c-tx6t4\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.364696 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:12 crc kubenswrapper[4711]: I1205 12:34:12.841289 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d88795c5c-tx6t4"] Dec 05 12:34:12 crc kubenswrapper[4711]: W1205 12:34:12.853589 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4d3c8b7_0400_4f17_a232_fdbe0902542f.slice/crio-d422a4186f44801e8519b39277f6ea1f4b65c6edfd82c49e433caba2986e0c79 WatchSource:0}: Error finding container d422a4186f44801e8519b39277f6ea1f4b65c6edfd82c49e433caba2986e0c79: Status 404 returned error can't find the container with id d422a4186f44801e8519b39277f6ea1f4b65c6edfd82c49e433caba2986e0c79 Dec 05 12:34:13 crc kubenswrapper[4711]: I1205 12:34:13.184922 4711 generic.go:334] "Generic (PLEG): container finished" podID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" containerID="3fec3264529252a717609eb042de31ee412868ef221484a0f9b3d7581d63a129" exitCode=0 Dec 05 12:34:13 crc kubenswrapper[4711]: I1205 12:34:13.185022 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" event={"ID":"b4d3c8b7-0400-4f17-a232-fdbe0902542f","Type":"ContainerDied","Data":"3fec3264529252a717609eb042de31ee412868ef221484a0f9b3d7581d63a129"} Dec 05 12:34:13 crc kubenswrapper[4711]: I1205 12:34:13.186188 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" event={"ID":"b4d3c8b7-0400-4f17-a232-fdbe0902542f","Type":"ContainerStarted","Data":"d422a4186f44801e8519b39277f6ea1f4b65c6edfd82c49e433caba2986e0c79"} Dec 05 12:34:14 crc kubenswrapper[4711]: I1205 12:34:14.198091 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" 
event={"ID":"b4d3c8b7-0400-4f17-a232-fdbe0902542f","Type":"ContainerStarted","Data":"79abcf7e5b9ec49537cdbf63dc555c29d8ba17b75b69b519ae22fe46a19a8636"} Dec 05 12:34:14 crc kubenswrapper[4711]: I1205 12:34:14.198454 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:14 crc kubenswrapper[4711]: I1205 12:34:14.230821 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" podStartSLOduration=2.2308032 podStartE2EDuration="2.2308032s" podCreationTimestamp="2025-12-05 12:34:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:34:14.22636402 +0000 UTC m=+1499.810686380" watchObservedRunningTime="2025-12-05 12:34:14.2308032 +0000 UTC m=+1499.815125530" Dec 05 12:34:16 crc kubenswrapper[4711]: I1205 12:34:16.274843 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:16 crc kubenswrapper[4711]: I1205 12:34:16.276204 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:16 crc kubenswrapper[4711]: I1205 12:34:16.321497 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:17 crc kubenswrapper[4711]: I1205 12:34:17.276229 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:17 crc kubenswrapper[4711]: I1205 12:34:17.328171 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hznc8"] Dec 05 12:34:18 crc kubenswrapper[4711]: I1205 12:34:18.301113 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:34:18 crc kubenswrapper[4711]: I1205 12:34:18.301226 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:34:19 crc kubenswrapper[4711]: I1205 12:34:19.245247 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hznc8" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerName="registry-server" containerID="cri-o://15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6" gracePeriod=2 Dec 05 12:34:19 crc kubenswrapper[4711]: I1205 12:34:19.721773 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:19 crc kubenswrapper[4711]: I1205 12:34:19.898573 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-catalog-content\") pod \"40188c7a-cea7-4049-808f-aa4a4016bcb9\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " Dec 05 12:34:19 crc kubenswrapper[4711]: I1205 12:34:19.898906 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-utilities\") pod \"40188c7a-cea7-4049-808f-aa4a4016bcb9\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " Dec 05 12:34:19 crc kubenswrapper[4711]: I1205 12:34:19.898979 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8v6j\" (UniqueName: \"kubernetes.io/projected/40188c7a-cea7-4049-808f-aa4a4016bcb9-kube-api-access-h8v6j\") pod \"40188c7a-cea7-4049-808f-aa4a4016bcb9\" (UID: \"40188c7a-cea7-4049-808f-aa4a4016bcb9\") " Dec 05 12:34:19 crc kubenswrapper[4711]: I1205 12:34:19.899830 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-utilities" (OuterVolumeSpecName: "utilities") pod "40188c7a-cea7-4049-808f-aa4a4016bcb9" (UID: "40188c7a-cea7-4049-808f-aa4a4016bcb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:34:19 crc kubenswrapper[4711]: I1205 12:34:19.905463 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40188c7a-cea7-4049-808f-aa4a4016bcb9-kube-api-access-h8v6j" (OuterVolumeSpecName: "kube-api-access-h8v6j") pod "40188c7a-cea7-4049-808f-aa4a4016bcb9" (UID: "40188c7a-cea7-4049-808f-aa4a4016bcb9"). InnerVolumeSpecName "kube-api-access-h8v6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:19 crc kubenswrapper[4711]: I1205 12:34:19.924574 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "40188c7a-cea7-4049-808f-aa4a4016bcb9" (UID: "40188c7a-cea7-4049-808f-aa4a4016bcb9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.001526 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.001583 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8v6j\" (UniqueName: \"kubernetes.io/projected/40188c7a-cea7-4049-808f-aa4a4016bcb9-kube-api-access-h8v6j\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.001599 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40188c7a-cea7-4049-808f-aa4a4016bcb9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.256547 4711 generic.go:334] "Generic (PLEG): container finished" podID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerID="15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6" exitCode=0 Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.256618 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hznc8" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.256885 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hznc8" event={"ID":"40188c7a-cea7-4049-808f-aa4a4016bcb9","Type":"ContainerDied","Data":"15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6"} Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.257028 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hznc8" event={"ID":"40188c7a-cea7-4049-808f-aa4a4016bcb9","Type":"ContainerDied","Data":"e295b05e8bc96ab8df0715966530456b4ebfe6af3d71a9fac6508b840a1ec89c"} Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.257122 4711 scope.go:117] "RemoveContainer" containerID="15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.292347 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hznc8"] Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.292776 4711 scope.go:117] "RemoveContainer" containerID="0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.306042 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hznc8"] Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.328313 4711 scope.go:117] "RemoveContainer" containerID="5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.374978 4711 scope.go:117] "RemoveContainer" containerID="15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6" Dec 05 12:34:20 crc kubenswrapper[4711]: E1205 12:34:20.375417 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6\": container with ID starting with 15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6 not found: ID does not exist" containerID="15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.375456 4711 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6"} err="failed to get container status \"15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6\": rpc error: code = NotFound desc = could not find container \"15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6\": container with ID starting with 15333ffeac36625785bc3b7b3a4fbebbf4c0bbb2404c0b47e8e7d8a2fb8dddd6 not found: ID does not exist" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.375483 4711 scope.go:117] "RemoveContainer" containerID="0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509" Dec 05 12:34:20 crc kubenswrapper[4711]: E1205 12:34:20.375812 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509\": container with ID starting with 0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509 not found: ID does not exist" containerID="0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.375840 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509"} err="failed to get container status \"0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509\": rpc error: code = NotFound desc = could not find container \"0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509\": container with ID starting with 0cca7be4c51de67b22fc7fbe38379de89b90177ad69a2818059881288ff26509 not found: ID does not exist" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.375856 4711 scope.go:117] "RemoveContainer" containerID="5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca" Dec 05 12:34:20 crc kubenswrapper[4711]: E1205 12:34:20.376157 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca\": container with ID starting with 5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca not found: ID does not exist" containerID="5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.376213 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca"} err="failed to get container status \"5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca\": rpc error: code = NotFound desc = could not find container \"5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca\": container with ID starting with 5faa1a6a0c8969d5805bc26f4b372725023a4f40b7a329f3ed23929b3f6d20ca not found: ID does not exist" Dec 05 12:34:20 crc kubenswrapper[4711]: I1205 12:34:20.697650 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" path="/var/lib/kubelet/pods/40188c7a-cea7-4049-808f-aa4a4016bcb9/volumes" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.365556 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.425309 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-69cc8c865c-d9tkn"] Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.425606 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" podUID="3c86ec8c-e349-4b7b-a4c8-21778f352703" containerName="dnsmasq-dns" containerID="cri-o://16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632" gracePeriod=10 Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.588450 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c85bff75-jnh4g"] Dec 05 12:34:22 crc kubenswrapper[4711]: E1205 12:34:22.589226 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerName="registry-server" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.589237 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerName="registry-server" Dec 05 12:34:22 crc kubenswrapper[4711]: E1205 12:34:22.589263 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerName="extract-content" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.589270 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerName="extract-content" Dec 05 12:34:22 crc kubenswrapper[4711]: E1205 12:34:22.589285 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerName="extract-utilities" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.589291 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerName="extract-utilities" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.589643 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="40188c7a-cea7-4049-808f-aa4a4016bcb9" containerName="registry-server" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.590709 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.616840 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c85bff75-jnh4g"] Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.767901 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-openstack-edpm-ipam\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.768028 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-config\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.768089 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-dns-swift-storage-0\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.768166 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-dns-svc\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.768262 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-ovsdbserver-sb\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.768321 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-ovsdbserver-nb\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.768360 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdsh2\" (UniqueName: \"kubernetes.io/projected/54f1fb07-feb6-4d6e-8264-f74beb20b77e-kube-api-access-qdsh2\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.872807 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-dns-swift-storage-0\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.873798 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-dns-swift-storage-0\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.875118 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-dns-svc\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.875260 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-ovsdbserver-sb\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.875304 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-ovsdbserver-nb\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.875336 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdsh2\" (UniqueName: \"kubernetes.io/projected/54f1fb07-feb6-4d6e-8264-f74beb20b77e-kube-api-access-qdsh2\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.875450 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-openstack-edpm-ipam\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.875526 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-config\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.875913 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-dns-svc\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.876165 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-ovsdbserver-sb\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.876649 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-ovsdbserver-nb\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.876669 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-config\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.876693 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/54f1fb07-feb6-4d6e-8264-f74beb20b77e-openstack-edpm-ipam\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.931426 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdsh2\" (UniqueName: \"kubernetes.io/projected/54f1fb07-feb6-4d6e-8264-f74beb20b77e-kube-api-access-qdsh2\") pod \"dnsmasq-dns-6c85bff75-jnh4g\" (UID: \"54f1fb07-feb6-4d6e-8264-f74beb20b77e\") " pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:22 crc kubenswrapper[4711]: I1205 12:34:22.946271 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.079308 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.180491 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-svc\") pod \"3c86ec8c-e349-4b7b-a4c8-21778f352703\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.180594 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-config\") pod \"3c86ec8c-e349-4b7b-a4c8-21778f352703\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.180638 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-swift-storage-0\") pod \"3c86ec8c-e349-4b7b-a4c8-21778f352703\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.180708 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-nb\") pod \"3c86ec8c-e349-4b7b-a4c8-21778f352703\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.180855 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-sb\") pod \"3c86ec8c-e349-4b7b-a4c8-21778f352703\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.180878 4711 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5p5c\" (UniqueName: \"kubernetes.io/projected/3c86ec8c-e349-4b7b-a4c8-21778f352703-kube-api-access-s5p5c\") pod \"3c86ec8c-e349-4b7b-a4c8-21778f352703\" (UID: \"3c86ec8c-e349-4b7b-a4c8-21778f352703\") " Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.186672 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c86ec8c-e349-4b7b-a4c8-21778f352703-kube-api-access-s5p5c" (OuterVolumeSpecName: "kube-api-access-s5p5c") pod "3c86ec8c-e349-4b7b-a4c8-21778f352703" (UID: "3c86ec8c-e349-4b7b-a4c8-21778f352703"). InnerVolumeSpecName "kube-api-access-s5p5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.261809 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3c86ec8c-e349-4b7b-a4c8-21778f352703" (UID: "3c86ec8c-e349-4b7b-a4c8-21778f352703"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.266440 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-config" (OuterVolumeSpecName: "config") pod "3c86ec8c-e349-4b7b-a4c8-21778f352703" (UID: "3c86ec8c-e349-4b7b-a4c8-21778f352703"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.278079 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3c86ec8c-e349-4b7b-a4c8-21778f352703" (UID: "3c86ec8c-e349-4b7b-a4c8-21778f352703"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.282873 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5p5c\" (UniqueName: \"kubernetes.io/projected/3c86ec8c-e349-4b7b-a4c8-21778f352703-kube-api-access-s5p5c\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.282899 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.282909 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.282917 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.286856 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3c86ec8c-e349-4b7b-a4c8-21778f352703" (UID: "3c86ec8c-e349-4b7b-a4c8-21778f352703"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.287997 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3c86ec8c-e349-4b7b-a4c8-21778f352703" (UID: "3c86ec8c-e349-4b7b-a4c8-21778f352703"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.293256 4711 generic.go:334] "Generic (PLEG): container finished" podID="3c86ec8c-e349-4b7b-a4c8-21778f352703" containerID="16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632" exitCode=0 Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.293292 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" event={"ID":"3c86ec8c-e349-4b7b-a4c8-21778f352703","Type":"ContainerDied","Data":"16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632"} Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.293319 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" event={"ID":"3c86ec8c-e349-4b7b-a4c8-21778f352703","Type":"ContainerDied","Data":"ce0cad81d4452eaabf20d8be9a0f5e5d8c833acf62fba203b584cf52013acc97"} Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.293339 4711 scope.go:117] "RemoveContainer" containerID="16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.293499 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69cc8c865c-d9tkn" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.330584 4711 scope.go:117] "RemoveContainer" containerID="e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.339055 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69cc8c865c-d9tkn"] Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.351086 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69cc8c865c-d9tkn"] Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.359312 4711 scope.go:117] "RemoveContainer" containerID="16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632" Dec 05 12:34:23 crc kubenswrapper[4711]: E1205 12:34:23.359849 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632\": container with ID starting with 16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632 not found: ID does not exist" containerID="16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.359897 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632"} err="failed to get container status \"16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632\": rpc error: code = NotFound desc = could not find container \"16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632\": container with ID starting with 16166848dedaf208b256d50d5b2cb36108d491e64125854444cfbdda9e8be632 not found: ID does not exist" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.359926 
4711 scope.go:117] "RemoveContainer" containerID="e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000" Dec 05 12:34:23 crc kubenswrapper[4711]: E1205 12:34:23.360366 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000\": container with ID starting with e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000 not found: ID does not exist" containerID="e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.360420 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000"} err="failed to get container status \"e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000\": rpc error: code = NotFound desc = could not find container \"e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000\": container with ID starting with e07a08534469b4ff38407e205ecb4f9cf756de3378f9d8697cb63c326ff19000 not found: ID does not exist" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.384712 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.384759 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c86ec8c-e349-4b7b-a4c8-21778f352703-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:23 crc kubenswrapper[4711]: I1205 12:34:23.513438 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c85bff75-jnh4g"] Dec 05 12:34:23 crc kubenswrapper[4711]: W1205 12:34:23.525139 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54f1fb07_feb6_4d6e_8264_f74beb20b77e.slice/crio-c81dc1374881228adfb42025a6ada47901a7ffd7118cc849b211d738cf3b820f WatchSource:0}: Error finding container c81dc1374881228adfb42025a6ada47901a7ffd7118cc849b211d738cf3b820f: Status 404 returned error can't find the container with id c81dc1374881228adfb42025a6ada47901a7ffd7118cc849b211d738cf3b820f Dec 05 12:34:24 crc kubenswrapper[4711]: I1205 12:34:24.305403 4711 generic.go:334] "Generic (PLEG): container finished" podID="54f1fb07-feb6-4d6e-8264-f74beb20b77e" containerID="6a68c18e0499030d93741a5d65324f38114cd27162f3553d06c37ddde8c343c0" exitCode=0 Dec 05 12:34:24 crc kubenswrapper[4711]: I1205 12:34:24.305495 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" event={"ID":"54f1fb07-feb6-4d6e-8264-f74beb20b77e","Type":"ContainerDied","Data":"6a68c18e0499030d93741a5d65324f38114cd27162f3553d06c37ddde8c343c0"} Dec 05 12:34:24 crc kubenswrapper[4711]: I1205 12:34:24.305747 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" event={"ID":"54f1fb07-feb6-4d6e-8264-f74beb20b77e","Type":"ContainerStarted","Data":"c81dc1374881228adfb42025a6ada47901a7ffd7118cc849b211d738cf3b820f"} Dec 05 12:34:24 crc kubenswrapper[4711]: I1205 12:34:24.694575 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c86ec8c-e349-4b7b-a4c8-21778f352703" path="/var/lib/kubelet/pods/3c86ec8c-e349-4b7b-a4c8-21778f352703/volumes" 
Dec 05 12:34:25 crc kubenswrapper[4711]: I1205 12:34:25.319323 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" event={"ID":"54f1fb07-feb6-4d6e-8264-f74beb20b77e","Type":"ContainerStarted","Data":"6d400acc7e030fa71fb773fecd2505161ca2078272e42422797950bba0763dd8"} Dec 05 12:34:25 crc kubenswrapper[4711]: I1205 12:34:25.319702 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:25 crc kubenswrapper[4711]: I1205 12:34:25.348314 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" podStartSLOduration=3.34829344 podStartE2EDuration="3.34829344s" podCreationTimestamp="2025-12-05 12:34:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:34:25.340064415 +0000 UTC m=+1510.924386745" watchObservedRunningTime="2025-12-05 12:34:25.34829344 +0000 UTC m=+1510.932615770" Dec 05 12:34:32 crc kubenswrapper[4711]: I1205 12:34:32.948571 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c85bff75-jnh4g" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.021472 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d88795c5c-tx6t4"] Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.022099 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" podUID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" containerName="dnsmasq-dns" containerID="cri-o://79abcf7e5b9ec49537cdbf63dc555c29d8ba17b75b69b519ae22fe46a19a8636" gracePeriod=10 Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.409354 4711 generic.go:334] "Generic (PLEG): container finished" podID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" containerID="79abcf7e5b9ec49537cdbf63dc555c29d8ba17b75b69b519ae22fe46a19a8636" exitCode=0 Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.409416 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" event={"ID":"b4d3c8b7-0400-4f17-a232-fdbe0902542f","Type":"ContainerDied","Data":"79abcf7e5b9ec49537cdbf63dc555c29d8ba17b75b69b519ae22fe46a19a8636"} Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.530200 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.624038 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-swift-storage-0\") pod \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.625154 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7ggc\" (UniqueName: \"kubernetes.io/projected/b4d3c8b7-0400-4f17-a232-fdbe0902542f-kube-api-access-r7ggc\") pod \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.625766 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-openstack-edpm-ipam\") pod \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.625892 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-nb\") pod \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.625970 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-sb\") pod \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.626449 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-config\") pod \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.626482 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-svc\") pod \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\" (UID: \"b4d3c8b7-0400-4f17-a232-fdbe0902542f\") " Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.630931 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4d3c8b7-0400-4f17-a232-fdbe0902542f-kube-api-access-r7ggc" (OuterVolumeSpecName: "kube-api-access-r7ggc") pod "b4d3c8b7-0400-4f17-a232-fdbe0902542f" (UID: "b4d3c8b7-0400-4f17-a232-fdbe0902542f"). InnerVolumeSpecName "kube-api-access-r7ggc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.680097 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b4d3c8b7-0400-4f17-a232-fdbe0902542f" (UID: "b4d3c8b7-0400-4f17-a232-fdbe0902542f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.681959 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b4d3c8b7-0400-4f17-a232-fdbe0902542f" (UID: "b4d3c8b7-0400-4f17-a232-fdbe0902542f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.684564 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b4d3c8b7-0400-4f17-a232-fdbe0902542f" (UID: "b4d3c8b7-0400-4f17-a232-fdbe0902542f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.688065 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b4d3c8b7-0400-4f17-a232-fdbe0902542f" (UID: "b4d3c8b7-0400-4f17-a232-fdbe0902542f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.688813 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "b4d3c8b7-0400-4f17-a232-fdbe0902542f" (UID: "b4d3c8b7-0400-4f17-a232-fdbe0902542f"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.693767 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-config" (OuterVolumeSpecName: "config") pod "b4d3c8b7-0400-4f17-a232-fdbe0902542f" (UID: "b4d3c8b7-0400-4f17-a232-fdbe0902542f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.730318 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.730363 4711 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.730375 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.730402 4711 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.730414 4711 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.730426 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7ggc\" (UniqueName: \"kubernetes.io/projected/b4d3c8b7-0400-4f17-a232-fdbe0902542f-kube-api-access-r7ggc\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:33 crc kubenswrapper[4711]: I1205 12:34:33.730441 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b4d3c8b7-0400-4f17-a232-fdbe0902542f-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:34 crc kubenswrapper[4711]: I1205 12:34:34.420992 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" event={"ID":"b4d3c8b7-0400-4f17-a232-fdbe0902542f","Type":"ContainerDied","Data":"d422a4186f44801e8519b39277f6ea1f4b65c6edfd82c49e433caba2986e0c79"} Dec 05 12:34:34 crc kubenswrapper[4711]: I1205 12:34:34.421051 4711 scope.go:117] "RemoveContainer" containerID="79abcf7e5b9ec49537cdbf63dc555c29d8ba17b75b69b519ae22fe46a19a8636" Dec 05 12:34:34 crc kubenswrapper[4711]: I1205 12:34:34.421083 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d88795c5c-tx6t4" Dec 05 12:34:34 crc kubenswrapper[4711]: I1205 12:34:34.451113 4711 scope.go:117] "RemoveContainer" containerID="3fec3264529252a717609eb042de31ee412868ef221484a0f9b3d7581d63a129" Dec 05 12:34:34 crc kubenswrapper[4711]: I1205 12:34:34.457608 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d88795c5c-tx6t4"] Dec 05 12:34:34 crc kubenswrapper[4711]: I1205 12:34:34.468931 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d88795c5c-tx6t4"] Dec 05 12:34:34 crc kubenswrapper[4711]: I1205 12:34:34.697878 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" path="/var/lib/kubelet/pods/b4d3c8b7-0400-4f17-a232-fdbe0902542f/volumes" Dec 05 12:34:40 crc kubenswrapper[4711]: I1205 12:34:40.484273 4711 generic.go:334] "Generic (PLEG): container finished" podID="314912fb-0c68-4fc7-9472-f84b1f0ab8cd" containerID="e2366c8d44311d1dab56e64b069ff42d4f6a07de38ad62b44959e017f8d5108c" exitCode=0 Dec 05 12:34:40 crc kubenswrapper[4711]: I1205 12:34:40.484412 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"314912fb-0c68-4fc7-9472-f84b1f0ab8cd","Type":"ContainerDied","Data":"e2366c8d44311d1dab56e64b069ff42d4f6a07de38ad62b44959e017f8d5108c"} Dec 05 12:34:40 crc kubenswrapper[4711]: I1205 12:34:40.489969 4711 generic.go:334] "Generic (PLEG): container finished" podID="1719e95a-bfa1-4302-b7f5-08acd0d41d93" containerID="ebd4d340f201def0519f94d5d27b283ff287acf7528e01b69476c48d01bf5cec" exitCode=0 Dec 05 12:34:40 crc kubenswrapper[4711]: I1205 12:34:40.490024 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1719e95a-bfa1-4302-b7f5-08acd0d41d93","Type":"ContainerDied","Data":"ebd4d340f201def0519f94d5d27b283ff287acf7528e01b69476c48d01bf5cec"} Dec 05 12:34:41 crc kubenswrapper[4711]: I1205 12:34:41.499668 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"314912fb-0c68-4fc7-9472-f84b1f0ab8cd","Type":"ContainerStarted","Data":"edd722417fa93d06e18e7b3373469211c30fcc2c2caddb1c03752ca20db873ba"} Dec 05 12:34:41 crc kubenswrapper[4711]: I1205 12:34:41.500810 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 12:34:41 crc kubenswrapper[4711]: I1205 12:34:41.502963 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1719e95a-bfa1-4302-b7f5-08acd0d41d93","Type":"ContainerStarted","Data":"f17ae94d02ba8d16455819b5d7a7ecf2b0123e7e6e69450bf0a57e2a227c219c"} Dec 05 12:34:41 crc kubenswrapper[4711]: I1205 12:34:41.503212 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:34:41 crc kubenswrapper[4711]: I1205 12:34:41.536625 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.536606153 podStartE2EDuration="36.536606153s" podCreationTimestamp="2025-12-05 12:34:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:34:41.528850969 +0000 UTC m=+1527.113173319" watchObservedRunningTime="2025-12-05 12:34:41.536606153 +0000 UTC m=+1527.120928493" Dec 05 12:34:41 crc kubenswrapper[4711]: I1205 12:34:41.565806 4711 
Dec 05 12:34:48 crc kubenswrapper[4711]: I1205 12:34:48.300791 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:34:48 crc kubenswrapper[4711]: I1205 12:34:48.301409 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:34:48 crc kubenswrapper[4711]: I1205 12:34:48.301477 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt"
Dec 05 12:34:48 crc kubenswrapper[4711]: I1205 12:34:48.302286 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 12:34:48 crc kubenswrapper[4711]: I1205 12:34:48.302349 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" gracePeriod=600
Dec 05 12:34:48 crc kubenswrapper[4711]: I1205 12:34:48.598515 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" exitCode=0
Dec 05 12:34:48 crc kubenswrapper[4711]: I1205 12:34:48.598603 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0"}
Dec 05 12:34:48 crc kubenswrapper[4711]: I1205 12:34:48.598793 4711 scope.go:117] "RemoveContainer" containerID="39ed3b2d5509b3071d8cb2dc86698b520f2d0e9ed254bb880705f90278301c5c"
Dec 05 12:34:48 crc kubenswrapper[4711]: E1205 12:34:48.681153 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:34:49 crc kubenswrapper[4711]: I1205 12:34:49.611278 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0"
Dec 05 12:34:49 crc kubenswrapper[4711]: E1205 12:34:49.612045 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.244553 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9"]
Dec 05 12:34:51 crc kubenswrapper[4711]: E1205 12:34:51.245129 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" containerName="dnsmasq-dns"
Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.245149 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" containerName="dnsmasq-dns"
Dec 05 12:34:51 crc kubenswrapper[4711]: E1205 12:34:51.245169 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c86ec8c-e349-4b7b-a4c8-21778f352703" containerName="init"
Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.245175 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c86ec8c-e349-4b7b-a4c8-21778f352703" containerName="init"
Dec 05 12:34:51 crc kubenswrapper[4711]: E1205 12:34:51.245199 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c86ec8c-e349-4b7b-a4c8-21778f352703" containerName="dnsmasq-dns"
Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.245207 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c86ec8c-e349-4b7b-a4c8-21778f352703" containerName="dnsmasq-dns"
Dec 05 12:34:51 crc kubenswrapper[4711]: E1205 12:34:51.245231 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" containerName="init"
Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.245239 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" containerName="init"
Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.245553 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d3c8b7-0400-4f17-a232-fdbe0902542f" containerName="dnsmasq-dns"
Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.245585 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c86ec8c-e349-4b7b-a4c8-21778f352703" containerName="dnsmasq-dns"
Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.246480 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9"
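[Editor's note: the "back-off 5m0s" in the CrashLoopBackOff errors above is the kubelet's restart backoff cap. Assuming stock kubelet defaults (10s initial delay, doubling per crash, 5-minute ceiling; an assumption, not something this log states), the delay schedule looks like this:]

    # Sketch of the assumed CrashLoopBackOff schedule; values in seconds.
    def crashloop_delays(initial=10, factor=2, cap=300):
        delay = initial
        while True:
            yield min(delay, cap)
            delay *= factor

    gen = crashloop_delays()
    print([next(gen) for _ in range(7)])  # [10, 20, 40, 80, 160, 300, 300]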
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.249788 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.249895 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.250062 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.250577 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.255529 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9"] Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.281869 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.281947 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.282023 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn7pl\" (UniqueName: \"kubernetes.io/projected/91e39369-6074-48c3-bb1f-4b2d8b92243e-kube-api-access-fn7pl\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.282048 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.384005 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.384092 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-inventory\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.384171 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn7pl\" (UniqueName: \"kubernetes.io/projected/91e39369-6074-48c3-bb1f-4b2d8b92243e-kube-api-access-fn7pl\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.384203 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.389660 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.390127 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.390119 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.399257 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn7pl\" (UniqueName: \"kubernetes.io/projected/91e39369-6074-48c3-bb1f-4b2d8b92243e-kube-api-access-fn7pl\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:51 crc kubenswrapper[4711]: I1205 12:34:51.562913 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:34:52 crc kubenswrapper[4711]: I1205 12:34:52.398268 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9"] Dec 05 12:34:52 crc kubenswrapper[4711]: I1205 12:34:52.695009 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" event={"ID":"91e39369-6074-48c3-bb1f-4b2d8b92243e","Type":"ContainerStarted","Data":"567e3218146fba60a09b98ed824be001965e4ae9da28f782bf39941ff0443244"} Dec 05 12:34:55 crc kubenswrapper[4711]: I1205 12:34:55.457557 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="314912fb-0c68-4fc7-9472-f84b1f0ab8cd" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.225:5671: connect: connection refused" Dec 05 12:34:55 crc kubenswrapper[4711]: I1205 12:34:55.487543 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:35:04 crc kubenswrapper[4711]: I1205 12:35:04.683204 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:35:04 crc kubenswrapper[4711]: E1205 12:35:04.684087 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:35:04 crc kubenswrapper[4711]: I1205 12:35:04.842184 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" event={"ID":"91e39369-6074-48c3-bb1f-4b2d8b92243e","Type":"ContainerStarted","Data":"d6e4c5d3fbcf6d45818314050ca7edbdedc09d25794d93abe7a212857ea955d2"} Dec 05 12:35:04 crc kubenswrapper[4711]: I1205 12:35:04.865357 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" podStartSLOduration=2.095163415 podStartE2EDuration="13.865336976s" podCreationTimestamp="2025-12-05 12:34:51 +0000 UTC" firstStartedPulling="2025-12-05 12:34:52.407639515 +0000 UTC m=+1537.991961845" lastFinishedPulling="2025-12-05 12:35:04.177813076 +0000 UTC m=+1549.762135406" observedRunningTime="2025-12-05 12:35:04.856354631 +0000 UTC m=+1550.440676951" watchObservedRunningTime="2025-12-05 12:35:04.865336976 +0000 UTC m=+1550.449659306" Dec 05 12:35:05 crc kubenswrapper[4711]: I1205 12:35:05.457633 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.676648 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dcfbz"] Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.682967 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.689041 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dcfbz"] Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.817216 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hqwl\" (UniqueName: \"kubernetes.io/projected/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-kube-api-access-7hqwl\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.817571 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-catalog-content\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.817649 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-utilities\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.919628 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hqwl\" (UniqueName: \"kubernetes.io/projected/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-kube-api-access-7hqwl\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.919720 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-catalog-content\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.919804 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-utilities\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.920230 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-utilities\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.920331 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-catalog-content\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:09 crc kubenswrapper[4711]: I1205 12:35:09.942132 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7hqwl\" (UniqueName: \"kubernetes.io/projected/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-kube-api-access-7hqwl\") pod \"redhat-operators-dcfbz\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:10 crc kubenswrapper[4711]: I1205 12:35:10.000780 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:10 crc kubenswrapper[4711]: I1205 12:35:10.474310 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dcfbz"] Dec 05 12:35:10 crc kubenswrapper[4711]: W1205 12:35:10.476301 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6665617c_95ea_4e0a_a90c_d84f5e0d9fbe.slice/crio-95bc2fb8e19e46389a2ea4cbb3039cd486eac1104b6c7371e09a1cbebc5df000 WatchSource:0}: Error finding container 95bc2fb8e19e46389a2ea4cbb3039cd486eac1104b6c7371e09a1cbebc5df000: Status 404 returned error can't find the container with id 95bc2fb8e19e46389a2ea4cbb3039cd486eac1104b6c7371e09a1cbebc5df000 Dec 05 12:35:10 crc kubenswrapper[4711]: I1205 12:35:10.895675 4711 generic.go:334] "Generic (PLEG): container finished" podID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerID="dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345" exitCode=0 Dec 05 12:35:10 crc kubenswrapper[4711]: I1205 12:35:10.895742 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dcfbz" event={"ID":"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe","Type":"ContainerDied","Data":"dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345"} Dec 05 12:35:10 crc kubenswrapper[4711]: I1205 12:35:10.895989 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dcfbz" event={"ID":"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe","Type":"ContainerStarted","Data":"95bc2fb8e19e46389a2ea4cbb3039cd486eac1104b6c7371e09a1cbebc5df000"} Dec 05 12:35:11 crc kubenswrapper[4711]: I1205 12:35:11.908026 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dcfbz" event={"ID":"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe","Type":"ContainerStarted","Data":"2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f"} Dec 05 12:35:15 crc kubenswrapper[4711]: I1205 12:35:15.952890 4711 generic.go:334] "Generic (PLEG): container finished" podID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerID="2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f" exitCode=0 Dec 05 12:35:15 crc kubenswrapper[4711]: I1205 12:35:15.953071 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dcfbz" event={"ID":"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe","Type":"ContainerDied","Data":"2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f"} Dec 05 12:35:16 crc kubenswrapper[4711]: I1205 12:35:16.965917 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dcfbz" event={"ID":"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe","Type":"ContainerStarted","Data":"368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6"} Dec 05 12:35:16 crc kubenswrapper[4711]: I1205 12:35:16.994566 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dcfbz" podStartSLOduration=2.515338957 podStartE2EDuration="7.994549313s" 
podCreationTimestamp="2025-12-05 12:35:09 +0000 UTC" firstStartedPulling="2025-12-05 12:35:10.897200037 +0000 UTC m=+1556.481522367" lastFinishedPulling="2025-12-05 12:35:16.376410393 +0000 UTC m=+1561.960732723" observedRunningTime="2025-12-05 12:35:16.984597744 +0000 UTC m=+1562.568920074" watchObservedRunningTime="2025-12-05 12:35:16.994549313 +0000 UTC m=+1562.578871643" Dec 05 12:35:19 crc kubenswrapper[4711]: I1205 12:35:19.002331 4711 generic.go:334] "Generic (PLEG): container finished" podID="91e39369-6074-48c3-bb1f-4b2d8b92243e" containerID="d6e4c5d3fbcf6d45818314050ca7edbdedc09d25794d93abe7a212857ea955d2" exitCode=0 Dec 05 12:35:19 crc kubenswrapper[4711]: I1205 12:35:19.002880 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" event={"ID":"91e39369-6074-48c3-bb1f-4b2d8b92243e","Type":"ContainerDied","Data":"d6e4c5d3fbcf6d45818314050ca7edbdedc09d25794d93abe7a212857ea955d2"} Dec 05 12:35:19 crc kubenswrapper[4711]: I1205 12:35:19.683506 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:35:19 crc kubenswrapper[4711]: E1205 12:35:19.683787 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.001496 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.001555 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.518581 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.637686 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-repo-setup-combined-ca-bundle\") pod \"91e39369-6074-48c3-bb1f-4b2d8b92243e\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.638167 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-ssh-key\") pod \"91e39369-6074-48c3-bb1f-4b2d8b92243e\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.638253 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-inventory\") pod \"91e39369-6074-48c3-bb1f-4b2d8b92243e\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.638304 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn7pl\" (UniqueName: \"kubernetes.io/projected/91e39369-6074-48c3-bb1f-4b2d8b92243e-kube-api-access-fn7pl\") pod \"91e39369-6074-48c3-bb1f-4b2d8b92243e\" (UID: \"91e39369-6074-48c3-bb1f-4b2d8b92243e\") " Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.643825 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e39369-6074-48c3-bb1f-4b2d8b92243e-kube-api-access-fn7pl" (OuterVolumeSpecName: "kube-api-access-fn7pl") pod "91e39369-6074-48c3-bb1f-4b2d8b92243e" (UID: "91e39369-6074-48c3-bb1f-4b2d8b92243e"). InnerVolumeSpecName "kube-api-access-fn7pl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.657550 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "91e39369-6074-48c3-bb1f-4b2d8b92243e" (UID: "91e39369-6074-48c3-bb1f-4b2d8b92243e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.674128 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "91e39369-6074-48c3-bb1f-4b2d8b92243e" (UID: "91e39369-6074-48c3-bb1f-4b2d8b92243e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.686675 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-inventory" (OuterVolumeSpecName: "inventory") pod "91e39369-6074-48c3-bb1f-4b2d8b92243e" (UID: "91e39369-6074-48c3-bb1f-4b2d8b92243e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.742899 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.742968 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn7pl\" (UniqueName: \"kubernetes.io/projected/91e39369-6074-48c3-bb1f-4b2d8b92243e-kube-api-access-fn7pl\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.742986 4711 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:20 crc kubenswrapper[4711]: I1205 12:35:20.742999 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91e39369-6074-48c3-bb1f-4b2d8b92243e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.022043 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" event={"ID":"91e39369-6074-48c3-bb1f-4b2d8b92243e","Type":"ContainerDied","Data":"567e3218146fba60a09b98ed824be001965e4ae9da28f782bf39941ff0443244"} Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.022081 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="567e3218146fba60a09b98ed824be001965e4ae9da28f782bf39941ff0443244" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.022118 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.061548 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dcfbz" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="registry-server" probeResult="failure" output=< Dec 05 12:35:21 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 12:35:21 crc kubenswrapper[4711]: > Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.144206 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx"] Dec 05 12:35:21 crc kubenswrapper[4711]: E1205 12:35:21.145002 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e39369-6074-48c3-bb1f-4b2d8b92243e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.145028 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e39369-6074-48c3-bb1f-4b2d8b92243e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.145337 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e39369-6074-48c3-bb1f-4b2d8b92243e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.146221 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.148679 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.148710 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.148674 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.150217 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.163622 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx"] Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.252602 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.252859 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twvm9\" (UniqueName: \"kubernetes.io/projected/4c833386-c174-487a-8044-9eb7c8fc7cb0-kube-api-access-twvm9\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.253024 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.354671 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.354770 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twvm9\" (UniqueName: \"kubernetes.io/projected/4c833386-c174-487a-8044-9eb7c8fc7cb0-kube-api-access-twvm9\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.354837 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.358707 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.358942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.371200 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twvm9\" (UniqueName: \"kubernetes.io/projected/4c833386-c174-487a-8044-9eb7c8fc7cb0-kube-api-access-twvm9\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cs9sx\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:21 crc kubenswrapper[4711]: I1205 12:35:21.465023 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:22 crc kubenswrapper[4711]: I1205 12:35:22.177683 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx"] Dec 05 12:35:23 crc kubenswrapper[4711]: I1205 12:35:23.043490 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" event={"ID":"4c833386-c174-487a-8044-9eb7c8fc7cb0","Type":"ContainerStarted","Data":"2ca4941958f296abd93e9f7dca29de4b031f0ec37bafe7825c543911fcd95407"} Dec 05 12:35:24 crc kubenswrapper[4711]: I1205 12:35:24.053420 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" event={"ID":"4c833386-c174-487a-8044-9eb7c8fc7cb0","Type":"ContainerStarted","Data":"a7fcb1edc38d45a3d70a546e64632363a7167c0f96822c442a40a59e90838a17"} Dec 05 12:35:24 crc kubenswrapper[4711]: I1205 12:35:24.073030 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" podStartSLOduration=2.5865691870000003 podStartE2EDuration="3.073007201s" podCreationTimestamp="2025-12-05 12:35:21 +0000 UTC" firstStartedPulling="2025-12-05 12:35:22.184196315 +0000 UTC m=+1567.768518645" lastFinishedPulling="2025-12-05 12:35:22.670634329 +0000 UTC m=+1568.254956659" observedRunningTime="2025-12-05 12:35:24.069803111 +0000 UTC m=+1569.654125481" watchObservedRunningTime="2025-12-05 12:35:24.073007201 +0000 UTC m=+1569.657329531" Dec 05 12:35:26 crc kubenswrapper[4711]: I1205 12:35:26.082046 4711 generic.go:334] "Generic (PLEG): container finished" podID="4c833386-c174-487a-8044-9eb7c8fc7cb0" containerID="a7fcb1edc38d45a3d70a546e64632363a7167c0f96822c442a40a59e90838a17" exitCode=0 Dec 05 12:35:26 crc kubenswrapper[4711]: I1205 12:35:26.082444 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" 
event={"ID":"4c833386-c174-487a-8044-9eb7c8fc7cb0","Type":"ContainerDied","Data":"a7fcb1edc38d45a3d70a546e64632363a7167c0f96822c442a40a59e90838a17"} Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.503762 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.578440 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twvm9\" (UniqueName: \"kubernetes.io/projected/4c833386-c174-487a-8044-9eb7c8fc7cb0-kube-api-access-twvm9\") pod \"4c833386-c174-487a-8044-9eb7c8fc7cb0\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.578528 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-inventory\") pod \"4c833386-c174-487a-8044-9eb7c8fc7cb0\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.578623 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-ssh-key\") pod \"4c833386-c174-487a-8044-9eb7c8fc7cb0\" (UID: \"4c833386-c174-487a-8044-9eb7c8fc7cb0\") " Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.588082 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c833386-c174-487a-8044-9eb7c8fc7cb0-kube-api-access-twvm9" (OuterVolumeSpecName: "kube-api-access-twvm9") pod "4c833386-c174-487a-8044-9eb7c8fc7cb0" (UID: "4c833386-c174-487a-8044-9eb7c8fc7cb0"). InnerVolumeSpecName "kube-api-access-twvm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.618877 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-inventory" (OuterVolumeSpecName: "inventory") pod "4c833386-c174-487a-8044-9eb7c8fc7cb0" (UID: "4c833386-c174-487a-8044-9eb7c8fc7cb0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.621081 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4c833386-c174-487a-8044-9eb7c8fc7cb0" (UID: "4c833386-c174-487a-8044-9eb7c8fc7cb0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.682282 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twvm9\" (UniqueName: \"kubernetes.io/projected/4c833386-c174-487a-8044-9eb7c8fc7cb0-kube-api-access-twvm9\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.682318 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.682327 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c833386-c174-487a-8044-9eb7c8fc7cb0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:27 crc kubenswrapper[4711]: I1205 12:35:27.972056 4711 scope.go:117] "RemoveContainer" containerID="14e3a4dc74c78be10689e9531392db409015217d81330978947af90ce33ec14d" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.011640 4711 scope.go:117] "RemoveContainer" containerID="ab32dc5d8a5a2e81d3e113bfab5b59ffce9256d55aeb06cadaac5fddb318ab74" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.099141 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" event={"ID":"4c833386-c174-487a-8044-9eb7c8fc7cb0","Type":"ContainerDied","Data":"2ca4941958f296abd93e9f7dca29de4b031f0ec37bafe7825c543911fcd95407"} Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.099185 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cs9sx" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.099195 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ca4941958f296abd93e9f7dca29de4b031f0ec37bafe7825c543911fcd95407" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.177277 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7"] Dec 05 12:35:28 crc kubenswrapper[4711]: E1205 12:35:28.177879 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c833386-c174-487a-8044-9eb7c8fc7cb0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.177901 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c833386-c174-487a-8044-9eb7c8fc7cb0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.178121 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c833386-c174-487a-8044-9eb7c8fc7cb0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.178921 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.193118 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7"] Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.221506 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.221853 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.222730 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.222892 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.295001 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.295086 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.295209 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.295299 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5chhd\" (UniqueName: \"kubernetes.io/projected/9ed39ddf-9274-44fa-8267-59d9c8f1447a-kube-api-access-5chhd\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.397652 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.397736 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.397815 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.397889 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5chhd\" (UniqueName: \"kubernetes.io/projected/9ed39ddf-9274-44fa-8267-59d9c8f1447a-kube-api-access-5chhd\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.402303 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.402842 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.403620 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.419497 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5chhd\" (UniqueName: \"kubernetes.io/projected/9ed39ddf-9274-44fa-8267-59d9c8f1447a-kube-api-access-5chhd\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:28 crc kubenswrapper[4711]: I1205 12:35:28.547212 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:35:29 crc kubenswrapper[4711]: I1205 12:35:29.083420 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7"] Dec 05 12:35:29 crc kubenswrapper[4711]: I1205 12:35:29.110912 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" event={"ID":"9ed39ddf-9274-44fa-8267-59d9c8f1447a","Type":"ContainerStarted","Data":"2462869a9945f6cffded74c8b8dabbd8f92f180e0303bd19a7908b36f93412cd"} Dec 05 12:35:30 crc kubenswrapper[4711]: I1205 12:35:30.066725 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:30 crc kubenswrapper[4711]: I1205 12:35:30.130876 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:30 crc kubenswrapper[4711]: I1205 12:35:30.134639 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" event={"ID":"9ed39ddf-9274-44fa-8267-59d9c8f1447a","Type":"ContainerStarted","Data":"e18dc0e21d1783f1ffa40e03f1aeced23069c71ad506288d3851a203b286bbd8"} Dec 05 12:35:30 crc kubenswrapper[4711]: I1205 12:35:30.186463 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" podStartSLOduration=1.600025489 podStartE2EDuration="2.184242172s" podCreationTimestamp="2025-12-05 12:35:28 +0000 UTC" firstStartedPulling="2025-12-05 12:35:29.094031917 +0000 UTC m=+1574.678354247" lastFinishedPulling="2025-12-05 12:35:29.6782486 +0000 UTC m=+1575.262570930" observedRunningTime="2025-12-05 12:35:30.166980141 +0000 UTC m=+1575.751302471" watchObservedRunningTime="2025-12-05 12:35:30.184242172 +0000 UTC m=+1575.768564502" Dec 05 12:35:30 crc kubenswrapper[4711]: I1205 12:35:30.306673 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dcfbz"] Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.142259 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dcfbz" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="registry-server" containerID="cri-o://368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6" gracePeriod=2 Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.644991 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.764687 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-utilities\") pod \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.764745 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hqwl\" (UniqueName: \"kubernetes.io/projected/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-kube-api-access-7hqwl\") pod \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.764841 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-catalog-content\") pod \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\" (UID: \"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe\") " Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.765561 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-utilities" (OuterVolumeSpecName: "utilities") pod "6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" (UID: "6665617c-95ea-4e0a-a90c-d84f5e0d9fbe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.770636 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-kube-api-access-7hqwl" (OuterVolumeSpecName: "kube-api-access-7hqwl") pod "6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" (UID: "6665617c-95ea-4e0a-a90c-d84f5e0d9fbe"). InnerVolumeSpecName "kube-api-access-7hqwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.867551 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.867597 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hqwl\" (UniqueName: \"kubernetes.io/projected/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-kube-api-access-7hqwl\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.870730 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" (UID: "6665617c-95ea-4e0a-a90c-d84f5e0d9fbe"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:35:31 crc kubenswrapper[4711]: I1205 12:35:31.969510 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.168244 4711 generic.go:334] "Generic (PLEG): container finished" podID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerID="368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6" exitCode=0 Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.168317 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dcfbz" event={"ID":"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe","Type":"ContainerDied","Data":"368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6"} Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.168355 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dcfbz" event={"ID":"6665617c-95ea-4e0a-a90c-d84f5e0d9fbe","Type":"ContainerDied","Data":"95bc2fb8e19e46389a2ea4cbb3039cd486eac1104b6c7371e09a1cbebc5df000"} Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.168360 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dcfbz" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.168374 4711 scope.go:117] "RemoveContainer" containerID="368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.200550 4711 scope.go:117] "RemoveContainer" containerID="2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.221193 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dcfbz"] Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.232705 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dcfbz"] Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.241187 4711 scope.go:117] "RemoveContainer" containerID="dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.274075 4711 scope.go:117] "RemoveContainer" containerID="368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6" Dec 05 12:35:32 crc kubenswrapper[4711]: E1205 12:35:32.274925 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6\": container with ID starting with 368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6 not found: ID does not exist" containerID="368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.274967 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6"} err="failed to get container status \"368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6\": rpc error: code = NotFound desc = could not find container \"368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6\": container with ID starting with 368a6a89578d1d4401e4b982998ebbc6b7ff643b31d21ed992c533da6c78c3f6 not found: ID does not exist" Dec 05 12:35:32 crc 
kubenswrapper[4711]: I1205 12:35:32.274988 4711 scope.go:117] "RemoveContainer" containerID="2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f" Dec 05 12:35:32 crc kubenswrapper[4711]: E1205 12:35:32.275440 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f\": container with ID starting with 2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f not found: ID does not exist" containerID="2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.275459 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f"} err="failed to get container status \"2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f\": rpc error: code = NotFound desc = could not find container \"2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f\": container with ID starting with 2c644a6149e3351b8066f8e0743bc7b50e126ce0f3889eacba1250d79f10454f not found: ID does not exist" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.275475 4711 scope.go:117] "RemoveContainer" containerID="dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345" Dec 05 12:35:32 crc kubenswrapper[4711]: E1205 12:35:32.275758 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345\": container with ID starting with dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345 not found: ID does not exist" containerID="dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.275781 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345"} err="failed to get container status \"dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345\": rpc error: code = NotFound desc = could not find container \"dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345\": container with ID starting with dcc7c7f6f5a95ab90ea3070ae20a56df7bbf61afb21b8c9f77a32ac8efc40345 not found: ID does not exist" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.683124 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:35:32 crc kubenswrapper[4711]: E1205 12:35:32.683916 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:35:32 crc kubenswrapper[4711]: I1205 12:35:32.696400 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" path="/var/lib/kubelet/pods/6665617c-95ea-4e0a-a90c-d84f5e0d9fbe/volumes" Dec 05 12:35:43 crc kubenswrapper[4711]: I1205 12:35:43.683723 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" 
Dec 05 12:35:43 crc kubenswrapper[4711]: E1205 12:35:43.685082 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:35:54 crc kubenswrapper[4711]: I1205 12:35:54.684012 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:35:54 crc kubenswrapper[4711]: E1205 12:35:54.684995 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:36:07 crc kubenswrapper[4711]: I1205 12:36:07.683355 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:36:07 crc kubenswrapper[4711]: E1205 12:36:07.684119 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:36:20 crc kubenswrapper[4711]: I1205 12:36:20.683960 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:36:20 crc kubenswrapper[4711]: E1205 12:36:20.684784 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:36:28 crc kubenswrapper[4711]: I1205 12:36:28.106227 4711 scope.go:117] "RemoveContainer" containerID="a6f51066ef8a4a296da4c8ab1e2110be7b51674e93abe5812d8dd2a9a63c0376" Dec 05 12:36:28 crc kubenswrapper[4711]: I1205 12:36:28.142251 4711 scope.go:117] "RemoveContainer" containerID="8401a42463d7e3b9de96ab55c5591ddf4fa85e49ebee1db2e3b54dd66608ea9d" Dec 05 12:36:32 crc kubenswrapper[4711]: I1205 12:36:32.683239 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:36:32 crc kubenswrapper[4711]: E1205 12:36:32.684208 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:36:43 
crc kubenswrapper[4711]: I1205 12:36:43.683737 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:36:43 crc kubenswrapper[4711]: E1205 12:36:43.684937 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:36:57 crc kubenswrapper[4711]: I1205 12:36:57.683281 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:36:57 crc kubenswrapper[4711]: E1205 12:36:57.684062 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:37:10 crc kubenswrapper[4711]: I1205 12:37:10.682938 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:37:10 crc kubenswrapper[4711]: E1205 12:37:10.683769 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:37:25 crc kubenswrapper[4711]: I1205 12:37:25.683491 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:37:25 crc kubenswrapper[4711]: E1205 12:37:25.684341 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:37:28 crc kubenswrapper[4711]: I1205 12:37:28.214637 4711 scope.go:117] "RemoveContainer" containerID="6cdeee827948dd7e8f94362c9b454e0150d5a06e230c371e7dec0fe77f76b54b" Dec 05 12:37:28 crc kubenswrapper[4711]: I1205 12:37:28.242076 4711 scope.go:117] "RemoveContainer" containerID="4bb99b0c027471f113ed1a596bfa6040945db8778af22edacf1f3ba44dd48521" Dec 05 12:37:39 crc kubenswrapper[4711]: I1205 12:37:39.682751 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:37:39 crc kubenswrapper[4711]: E1205 12:37:39.683820 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:37:52 crc kubenswrapper[4711]: I1205 12:37:52.684073 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:37:52 crc kubenswrapper[4711]: E1205 12:37:52.684874 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:38:06 crc kubenswrapper[4711]: I1205 12:38:06.683965 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:38:06 crc kubenswrapper[4711]: E1205 12:38:06.684772 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:38:18 crc kubenswrapper[4711]: I1205 12:38:18.693400 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:38:18 crc kubenswrapper[4711]: E1205 12:38:18.694174 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.384202 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wjtdk"] Dec 05 12:38:22 crc kubenswrapper[4711]: E1205 12:38:22.385238 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="extract-utilities" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.385257 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="extract-utilities" Dec 05 12:38:22 crc kubenswrapper[4711]: E1205 12:38:22.385272 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="extract-content" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.385280 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="extract-content" Dec 05 12:38:22 crc kubenswrapper[4711]: E1205 12:38:22.385297 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="registry-server" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.385304 4711 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="registry-server" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.385594 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6665617c-95ea-4e0a-a90c-d84f5e0d9fbe" containerName="registry-server" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.387113 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.404562 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wjtdk"] Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.442862 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksmk8\" (UniqueName: \"kubernetes.io/projected/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-kube-api-access-ksmk8\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.442917 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-catalog-content\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.443063 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-utilities\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.544583 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksmk8\" (UniqueName: \"kubernetes.io/projected/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-kube-api-access-ksmk8\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.545022 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-catalog-content\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.545158 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-utilities\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.545428 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-catalog-content\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.545523 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-utilities\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.561616 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksmk8\" (UniqueName: \"kubernetes.io/projected/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-kube-api-access-ksmk8\") pod \"certified-operators-wjtdk\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:22 crc kubenswrapper[4711]: I1205 12:38:22.707304 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:23 crc kubenswrapper[4711]: I1205 12:38:23.175344 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wjtdk"] Dec 05 12:38:24 crc kubenswrapper[4711]: I1205 12:38:24.192924 4711 generic.go:334] "Generic (PLEG): container finished" podID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerID="cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f" exitCode=0 Dec 05 12:38:24 crc kubenswrapper[4711]: I1205 12:38:24.193036 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wjtdk" event={"ID":"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b","Type":"ContainerDied","Data":"cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f"} Dec 05 12:38:24 crc kubenswrapper[4711]: I1205 12:38:24.193831 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wjtdk" event={"ID":"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b","Type":"ContainerStarted","Data":"fa8d536f26dc3bf6e7a20e7ed8aec6e124541216b23613085fedb4c9e103c2cc"} Dec 05 12:38:24 crc kubenswrapper[4711]: I1205 12:38:24.210140 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:38:25 crc kubenswrapper[4711]: I1205 12:38:25.204308 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wjtdk" event={"ID":"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b","Type":"ContainerStarted","Data":"07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e"} Dec 05 12:38:26 crc kubenswrapper[4711]: I1205 12:38:26.217582 4711 generic.go:334] "Generic (PLEG): container finished" podID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerID="07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e" exitCode=0 Dec 05 12:38:26 crc kubenswrapper[4711]: I1205 12:38:26.217622 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wjtdk" event={"ID":"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b","Type":"ContainerDied","Data":"07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e"} Dec 05 12:38:27 crc kubenswrapper[4711]: I1205 12:38:27.231569 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wjtdk" event={"ID":"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b","Type":"ContainerStarted","Data":"1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6"} Dec 05 12:38:27 crc kubenswrapper[4711]: I1205 12:38:27.251207 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-wjtdk" podStartSLOduration=2.775842397 podStartE2EDuration="5.2511848s" podCreationTimestamp="2025-12-05 12:38:22 +0000 UTC" firstStartedPulling="2025-12-05 12:38:24.209727091 +0000 UTC m=+1749.794049421" lastFinishedPulling="2025-12-05 12:38:26.685069494 +0000 UTC m=+1752.269391824" observedRunningTime="2025-12-05 12:38:27.248530196 +0000 UTC m=+1752.832852536" watchObservedRunningTime="2025-12-05 12:38:27.2511848 +0000 UTC m=+1752.835507150" Dec 05 12:38:31 crc kubenswrapper[4711]: I1205 12:38:31.684134 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:38:31 crc kubenswrapper[4711]: E1205 12:38:31.685245 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:38:32 crc kubenswrapper[4711]: I1205 12:38:32.707807 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:32 crc kubenswrapper[4711]: I1205 12:38:32.707881 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:32 crc kubenswrapper[4711]: I1205 12:38:32.767377 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:33 crc kubenswrapper[4711]: I1205 12:38:33.348090 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:33 crc kubenswrapper[4711]: I1205 12:38:33.395375 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wjtdk"] Dec 05 12:38:35 crc kubenswrapper[4711]: I1205 12:38:35.299888 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wjtdk" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerName="registry-server" containerID="cri-o://1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6" gracePeriod=2 Dec 05 12:38:35 crc kubenswrapper[4711]: I1205 12:38:35.762869 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:35 crc kubenswrapper[4711]: I1205 12:38:35.908910 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-catalog-content\") pod \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " Dec 05 12:38:35 crc kubenswrapper[4711]: I1205 12:38:35.909272 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksmk8\" (UniqueName: \"kubernetes.io/projected/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-kube-api-access-ksmk8\") pod \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " Dec 05 12:38:35 crc kubenswrapper[4711]: I1205 12:38:35.909316 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-utilities\") pod \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\" (UID: \"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b\") " Dec 05 12:38:35 crc kubenswrapper[4711]: I1205 12:38:35.910128 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-utilities" (OuterVolumeSpecName: "utilities") pod "a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" (UID: "a8f12a15-731e-40dd-ad7b-5514f0c6cf8b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:38:35 crc kubenswrapper[4711]: I1205 12:38:35.927797 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-kube-api-access-ksmk8" (OuterVolumeSpecName: "kube-api-access-ksmk8") pod "a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" (UID: "a8f12a15-731e-40dd-ad7b-5514f0c6cf8b"). InnerVolumeSpecName "kube-api-access-ksmk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.011655 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksmk8\" (UniqueName: \"kubernetes.io/projected/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-kube-api-access-ksmk8\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.011689 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.136232 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" (UID: "a8f12a15-731e-40dd-ad7b-5514f0c6cf8b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.214754 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.310437 4711 generic.go:334] "Generic (PLEG): container finished" podID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerID="1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6" exitCode=0 Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.310487 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wjtdk" event={"ID":"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b","Type":"ContainerDied","Data":"1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6"} Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.310492 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wjtdk" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.310511 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wjtdk" event={"ID":"a8f12a15-731e-40dd-ad7b-5514f0c6cf8b","Type":"ContainerDied","Data":"fa8d536f26dc3bf6e7a20e7ed8aec6e124541216b23613085fedb4c9e103c2cc"} Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.310526 4711 scope.go:117] "RemoveContainer" containerID="1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.338872 4711 scope.go:117] "RemoveContainer" containerID="07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.343739 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wjtdk"] Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.352633 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wjtdk"] Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.362278 4711 scope.go:117] "RemoveContainer" containerID="cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.416030 4711 scope.go:117] "RemoveContainer" containerID="1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6" Dec 05 12:38:36 crc kubenswrapper[4711]: E1205 12:38:36.416614 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6\": container with ID starting with 1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6 not found: ID does not exist" containerID="1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.416665 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6"} err="failed to get container status \"1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6\": rpc error: code = NotFound desc = could not find container \"1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6\": container with ID starting with 1e90c16a795120364ce61dd93c91593e03aec2ac3f1379dce879bd335e8f02a6 not found: ID does not exist" Dec 05 
12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.416694 4711 scope.go:117] "RemoveContainer" containerID="07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e" Dec 05 12:38:36 crc kubenswrapper[4711]: E1205 12:38:36.417085 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e\": container with ID starting with 07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e not found: ID does not exist" containerID="07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.417117 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e"} err="failed to get container status \"07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e\": rpc error: code = NotFound desc = could not find container \"07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e\": container with ID starting with 07830e2da96b9fd716dc36652ff6b63aa4e403822f15c1e07a69dba657a6703e not found: ID does not exist" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.417130 4711 scope.go:117] "RemoveContainer" containerID="cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f" Dec 05 12:38:36 crc kubenswrapper[4711]: E1205 12:38:36.417356 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f\": container with ID starting with cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f not found: ID does not exist" containerID="cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.417405 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f"} err="failed to get container status \"cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f\": rpc error: code = NotFound desc = could not find container \"cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f\": container with ID starting with cc4b3d7fe6b25cb8d4504e39254f9d4536878e6a0853abe2d1abc4f9476e8b4f not found: ID does not exist" Dec 05 12:38:36 crc kubenswrapper[4711]: I1205 12:38:36.694838 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" path="/var/lib/kubelet/pods/a8f12a15-731e-40dd-ad7b-5514f0c6cf8b/volumes" Dec 05 12:38:46 crc kubenswrapper[4711]: I1205 12:38:46.046012 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-9d6hb"] Dec 05 12:38:46 crc kubenswrapper[4711]: I1205 12:38:46.056533 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-b300-account-create-update-nd6hj"] Dec 05 12:38:46 crc kubenswrapper[4711]: I1205 12:38:46.066497 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-9d6hb"] Dec 05 12:38:46 crc kubenswrapper[4711]: I1205 12:38:46.076273 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-b300-account-create-update-nd6hj"] Dec 05 12:38:46 crc kubenswrapper[4711]: I1205 12:38:46.683366 4711 scope.go:117] "RemoveContainer" 
containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:38:46 crc kubenswrapper[4711]: E1205 12:38:46.683671 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:38:46 crc kubenswrapper[4711]: I1205 12:38:46.695327 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="697913df-02ee-441f-a751-ba53f26158ed" path="/var/lib/kubelet/pods/697913df-02ee-441f-a751-ba53f26158ed/volumes" Dec 05 12:38:46 crc kubenswrapper[4711]: I1205 12:38:46.696180 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73d6ddd8-359e-4d13-884f-2d3808250318" path="/var/lib/kubelet/pods/73d6ddd8-359e-4d13-884f-2d3808250318/volumes" Dec 05 12:38:53 crc kubenswrapper[4711]: I1205 12:38:53.029901 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-31c6-account-create-update-988z6"] Dec 05 12:38:53 crc kubenswrapper[4711]: I1205 12:38:53.042102 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-z6v79"] Dec 05 12:38:53 crc kubenswrapper[4711]: I1205 12:38:53.050700 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-z6v79"] Dec 05 12:38:53 crc kubenswrapper[4711]: I1205 12:38:53.060277 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-31c6-account-create-update-988z6"] Dec 05 12:38:54 crc kubenswrapper[4711]: I1205 12:38:54.047851 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8d9f-account-create-update-jcgcd"] Dec 05 12:38:54 crc kubenswrapper[4711]: I1205 12:38:54.059140 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-rqxct"] Dec 05 12:38:54 crc kubenswrapper[4711]: I1205 12:38:54.069352 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-8d9f-account-create-update-jcgcd"] Dec 05 12:38:54 crc kubenswrapper[4711]: I1205 12:38:54.080456 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-rqxct"] Dec 05 12:38:54 crc kubenswrapper[4711]: I1205 12:38:54.697090 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107c5bb6-4c54-4e09-9dc5-2f777321d66c" path="/var/lib/kubelet/pods/107c5bb6-4c54-4e09-9dc5-2f777321d66c/volumes" Dec 05 12:38:55 crc kubenswrapper[4711]: I1205 12:38:54.698130 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e40033e-bd5c-45ee-b9d9-5c02304fd76d" path="/var/lib/kubelet/pods/2e40033e-bd5c-45ee-b9d9-5c02304fd76d/volumes" Dec 05 12:38:55 crc kubenswrapper[4711]: I1205 12:38:54.698769 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b66d472-e836-4788-a4f5-2cfc3b269e24" path="/var/lib/kubelet/pods/4b66d472-e836-4788-a4f5-2cfc3b269e24/volumes" Dec 05 12:38:55 crc kubenswrapper[4711]: I1205 12:38:54.699447 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66eadbd3-94ed-46b4-abb4-7a9ead434641" path="/var/lib/kubelet/pods/66eadbd3-94ed-46b4-abb4-7a9ead434641/volumes" Dec 05 12:38:55 crc kubenswrapper[4711]: E1205 12:38:55.687673 4711 kubelet.go:2526] "Housekeeping took longer than expected" 
err="housekeeping took too long" expected="1s" actual="1.005s" Dec 05 12:38:59 crc kubenswrapper[4711]: I1205 12:38:59.683314 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:38:59 crc kubenswrapper[4711]: E1205 12:38:59.684050 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:39:05 crc kubenswrapper[4711]: I1205 12:39:05.038706 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-4htss"] Dec 05 12:39:05 crc kubenswrapper[4711]: I1205 12:39:05.053320 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-795fh"] Dec 05 12:39:05 crc kubenswrapper[4711]: I1205 12:39:05.065217 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-bcc9-account-create-update-qjdq2"] Dec 05 12:39:05 crc kubenswrapper[4711]: I1205 12:39:05.076281 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-4htss"] Dec 05 12:39:05 crc kubenswrapper[4711]: I1205 12:39:05.086557 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-bcc9-account-create-update-qjdq2"] Dec 05 12:39:05 crc kubenswrapper[4711]: I1205 12:39:05.096965 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-93db-account-create-update-ft8x5"] Dec 05 12:39:05 crc kubenswrapper[4711]: I1205 12:39:05.106708 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-795fh"] Dec 05 12:39:05 crc kubenswrapper[4711]: I1205 12:39:05.116443 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-93db-account-create-update-ft8x5"] Dec 05 12:39:06 crc kubenswrapper[4711]: I1205 12:39:06.696322 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d52e95e-2ce4-4ea1-a9c3-50360150e675" path="/var/lib/kubelet/pods/2d52e95e-2ce4-4ea1-a9c3-50360150e675/volumes" Dec 05 12:39:06 crc kubenswrapper[4711]: I1205 12:39:06.697211 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="500de779-f104-4141-87e8-95e80e23a870" path="/var/lib/kubelet/pods/500de779-f104-4141-87e8-95e80e23a870/volumes" Dec 05 12:39:06 crc kubenswrapper[4711]: I1205 12:39:06.697949 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e4b2ca5-1164-478f-b5cd-b3a553a5100e" path="/var/lib/kubelet/pods/7e4b2ca5-1164-478f-b5cd-b3a553a5100e/volumes" Dec 05 12:39:06 crc kubenswrapper[4711]: I1205 12:39:06.698804 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bfdb3c4-d406-4d00-b405-2216fbe95943" path="/var/lib/kubelet/pods/8bfdb3c4-d406-4d00-b405-2216fbe95943/volumes" Dec 05 12:39:11 crc kubenswrapper[4711]: I1205 12:39:11.683746 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:39:11 crc kubenswrapper[4711]: E1205 12:39:11.684612 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:39:13 crc kubenswrapper[4711]: I1205 12:39:13.658484 4711 generic.go:334] "Generic (PLEG): container finished" podID="9ed39ddf-9274-44fa-8267-59d9c8f1447a" containerID="e18dc0e21d1783f1ffa40e03f1aeced23069c71ad506288d3851a203b286bbd8" exitCode=0 Dec 05 12:39:13 crc kubenswrapper[4711]: I1205 12:39:13.658535 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" event={"ID":"9ed39ddf-9274-44fa-8267-59d9c8f1447a","Type":"ContainerDied","Data":"e18dc0e21d1783f1ffa40e03f1aeced23069c71ad506288d3851a203b286bbd8"} Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.089215 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.211978 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-inventory\") pod \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.212111 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-bootstrap-combined-ca-bundle\") pod \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.212147 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5chhd\" (UniqueName: \"kubernetes.io/projected/9ed39ddf-9274-44fa-8267-59d9c8f1447a-kube-api-access-5chhd\") pod \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.212232 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-ssh-key\") pod \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\" (UID: \"9ed39ddf-9274-44fa-8267-59d9c8f1447a\") " Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.218606 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ed39ddf-9274-44fa-8267-59d9c8f1447a-kube-api-access-5chhd" (OuterVolumeSpecName: "kube-api-access-5chhd") pod "9ed39ddf-9274-44fa-8267-59d9c8f1447a" (UID: "9ed39ddf-9274-44fa-8267-59d9c8f1447a"). InnerVolumeSpecName "kube-api-access-5chhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.219732 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "9ed39ddf-9274-44fa-8267-59d9c8f1447a" (UID: "9ed39ddf-9274-44fa-8267-59d9c8f1447a"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.243990 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9ed39ddf-9274-44fa-8267-59d9c8f1447a" (UID: "9ed39ddf-9274-44fa-8267-59d9c8f1447a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.245234 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-inventory" (OuterVolumeSpecName: "inventory") pod "9ed39ddf-9274-44fa-8267-59d9c8f1447a" (UID: "9ed39ddf-9274-44fa-8267-59d9c8f1447a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.315423 4711 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.315665 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5chhd\" (UniqueName: \"kubernetes.io/projected/9ed39ddf-9274-44fa-8267-59d9c8f1447a-kube-api-access-5chhd\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.315763 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.315843 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ed39ddf-9274-44fa-8267-59d9c8f1447a-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.679235 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" event={"ID":"9ed39ddf-9274-44fa-8267-59d9c8f1447a","Type":"ContainerDied","Data":"2462869a9945f6cffded74c8b8dabbd8f92f180e0303bd19a7908b36f93412cd"} Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.679799 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2462869a9945f6cffded74c8b8dabbd8f92f180e0303bd19a7908b36f93412cd" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.679294 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.774487 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8"] Dec 05 12:39:15 crc kubenswrapper[4711]: E1205 12:39:15.774979 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerName="extract-content" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.775004 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerName="extract-content" Dec 05 12:39:15 crc kubenswrapper[4711]: E1205 12:39:15.775053 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerName="extract-utilities" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.775063 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerName="extract-utilities" Dec 05 12:39:15 crc kubenswrapper[4711]: E1205 12:39:15.775079 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerName="registry-server" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.775088 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerName="registry-server" Dec 05 12:39:15 crc kubenswrapper[4711]: E1205 12:39:15.775102 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ed39ddf-9274-44fa-8267-59d9c8f1447a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.775111 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ed39ddf-9274-44fa-8267-59d9c8f1447a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.775350 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ed39ddf-9274-44fa-8267-59d9c8f1447a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.775411 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8f12a15-731e-40dd-ad7b-5514f0c6cf8b" containerName="registry-server" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.777603 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.779654 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.779822 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.779957 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.780143 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.787894 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8"] Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.827543 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65xgb\" (UniqueName: \"kubernetes.io/projected/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-kube-api-access-65xgb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.827594 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.827717 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.929765 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65xgb\" (UniqueName: \"kubernetes.io/projected/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-kube-api-access-65xgb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.930582 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.931456 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-inventory\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.936292 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.936292 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:15 crc kubenswrapper[4711]: I1205 12:39:15.947181 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65xgb\" (UniqueName: \"kubernetes.io/projected/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-kube-api-access-65xgb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:16 crc kubenswrapper[4711]: I1205 12:39:16.105429 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:39:16 crc kubenswrapper[4711]: I1205 12:39:16.638822 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8"] Dec 05 12:39:16 crc kubenswrapper[4711]: I1205 12:39:16.692986 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" event={"ID":"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0","Type":"ContainerStarted","Data":"a43d2b71444fff4e2f079f34f70ab050d6bae249992f5c76c2dca747017e7e92"} Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.049068 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-np4sn"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.064145 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ca3b-account-create-update-mnv67"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.075099 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-ldxm8"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.084380 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-np4sn"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.106873 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-6d77-account-create-update-gh4ds"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.120861 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-tfvxw"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.133843 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-ca3b-account-create-update-mnv67"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.145159 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-ldxm8"] Dec 05 
12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.164779 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-6d77-account-create-update-gh4ds"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.174946 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-tfvxw"] Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.703441 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" event={"ID":"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0","Type":"ContainerStarted","Data":"4a849810fd008bd891b322f34f604a9557735330ddaea67941aab63f47413f68"} Dec 05 12:39:17 crc kubenswrapper[4711]: I1205 12:39:17.730359 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" podStartSLOduration=2.282279542 podStartE2EDuration="2.730201409s" podCreationTimestamp="2025-12-05 12:39:15 +0000 UTC" firstStartedPulling="2025-12-05 12:39:16.636945835 +0000 UTC m=+1802.221268165" lastFinishedPulling="2025-12-05 12:39:17.084867702 +0000 UTC m=+1802.669190032" observedRunningTime="2025-12-05 12:39:17.720588875 +0000 UTC m=+1803.304911205" watchObservedRunningTime="2025-12-05 12:39:17.730201409 +0000 UTC m=+1803.314523739" Dec 05 12:39:18 crc kubenswrapper[4711]: I1205 12:39:18.722266 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2aadbf74-6733-453e-bdfc-f8b8bb7f60da" path="/var/lib/kubelet/pods/2aadbf74-6733-453e-bdfc-f8b8bb7f60da/volumes" Dec 05 12:39:18 crc kubenswrapper[4711]: I1205 12:39:18.723109 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4881af57-6b36-4c72-ba21-4ac8872288a1" path="/var/lib/kubelet/pods/4881af57-6b36-4c72-ba21-4ac8872288a1/volumes" Dec 05 12:39:18 crc kubenswrapper[4711]: I1205 12:39:18.723918 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="847c5ee7-c53a-4173-82ad-313e68c42910" path="/var/lib/kubelet/pods/847c5ee7-c53a-4173-82ad-313e68c42910/volumes" Dec 05 12:39:18 crc kubenswrapper[4711]: I1205 12:39:18.724947 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71" path="/var/lib/kubelet/pods/9caa91dd-f6d6-49a2-ba31-ab3ba2c4bd71/volumes" Dec 05 12:39:18 crc kubenswrapper[4711]: I1205 12:39:18.726417 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d12a5b3d-46d8-41a5-95c8-1f95f238c8b1" path="/var/lib/kubelet/pods/d12a5b3d-46d8-41a5-95c8-1f95f238c8b1/volumes" Dec 05 12:39:26 crc kubenswrapper[4711]: I1205 12:39:26.683381 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:39:26 crc kubenswrapper[4711]: E1205 12:39:26.684205 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.357259 4711 scope.go:117] "RemoveContainer" containerID="cf4104599a770557f3c48b31317f1032f4ef66758473ae6f8eb6e0381b5c84f8" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.397497 4711 scope.go:117] "RemoveContainer" 
containerID="c1fe3f2c2dfa3c546d4a567302f6a525c40282af87881b216b196a142e3262d8" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.443211 4711 scope.go:117] "RemoveContainer" containerID="241c5ba0f8b47b1f2281e6d3d659be3e8b8b75f6c1b56adedc7d73fcd728d81f" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.495326 4711 scope.go:117] "RemoveContainer" containerID="fdb629b0b1a9720b51a6e90599267a71c0f7e8952c59a880bbd76034c3352032" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.564462 4711 scope.go:117] "RemoveContainer" containerID="c8c9107256b64f80317aa1a558204b85dc735cfc8eabcf4558e2305e84582aa3" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.611317 4711 scope.go:117] "RemoveContainer" containerID="88635986f00781c8eda5d72e0bd8e78ab0a13288cfe76fe85254cabb9914d728" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.668763 4711 scope.go:117] "RemoveContainer" containerID="f238c69518f7100223bf01fb4068d87baa6238ccb75af139fe957500d3905ee8" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.693373 4711 scope.go:117] "RemoveContainer" containerID="b691811c358b80e304d8a1700b70ffdef4f3322967b67174551d323bf4e0449d" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.713824 4711 scope.go:117] "RemoveContainer" containerID="0ff4e599964d18574948483cdd8e7e316cc0778a0bd0f41429c60253f4c007b0" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.736537 4711 scope.go:117] "RemoveContainer" containerID="b784f6ef465395013f1cf88fd1b269c745842264fdbf7c4ad0f83be0aa958acd" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.761531 4711 scope.go:117] "RemoveContainer" containerID="d761a512e557dee327845990959c42d8c101a9816cfeac2cd0cb73475cc7388c" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.782924 4711 scope.go:117] "RemoveContainer" containerID="98a36a74dcc1ed4ce5d280a54d469e2fcc0c8792c70948dc5d786088eb92f28a" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.811163 4711 scope.go:117] "RemoveContainer" containerID="5314bd7fc52affb14a5640a4fb32585b823e3290afe7e83b5c2071facfc9f823" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.833949 4711 scope.go:117] "RemoveContainer" containerID="0d706ca798f565ae9cedbe11bce3e3e7672922350d7b6247c43afa4dc596afe9" Dec 05 12:39:28 crc kubenswrapper[4711]: I1205 12:39:28.855078 4711 scope.go:117] "RemoveContainer" containerID="e266897b512b90204c11e8c27b312996941c58fce56c515d9b381875f129997b" Dec 05 12:39:41 crc kubenswrapper[4711]: I1205 12:39:41.683807 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:39:41 crc kubenswrapper[4711]: E1205 12:39:41.685726 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:39:50 crc kubenswrapper[4711]: I1205 12:39:50.056672 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-h26mq"] Dec 05 12:39:50 crc kubenswrapper[4711]: I1205 12:39:50.066690 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-h26mq"] Dec 05 12:39:50 crc kubenswrapper[4711]: I1205 12:39:50.694121 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c46ee7cc-06c9-41b0-b560-cc35c14dbf00" path="/var/lib/kubelet/pods/c46ee7cc-06c9-41b0-b560-cc35c14dbf00/volumes" Dec 05 12:39:56 crc kubenswrapper[4711]: I1205 12:39:56.683476 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:39:57 crc kubenswrapper[4711]: I1205 12:39:57.097173 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"0154cbb3ce8d498f21d6fffddd10e58f73c3ea89753011e091ff41fc17837fe9"} Dec 05 12:40:03 crc kubenswrapper[4711]: I1205 12:40:03.033376 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-kwn6b"] Dec 05 12:40:03 crc kubenswrapper[4711]: I1205 12:40:03.046033 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-kwn6b"] Dec 05 12:40:04 crc kubenswrapper[4711]: I1205 12:40:04.696756 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37cbdd67-d3c7-4318-9a73-82b9a4e249fa" path="/var/lib/kubelet/pods/37cbdd67-d3c7-4318-9a73-82b9a4e249fa/volumes" Dec 05 12:40:25 crc kubenswrapper[4711]: I1205 12:40:25.043307 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-2g9vv"] Dec 05 12:40:25 crc kubenswrapper[4711]: I1205 12:40:25.054573 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-2g9vv"] Dec 05 12:40:26 crc kubenswrapper[4711]: I1205 12:40:26.694904 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1004a56f-792b-4dae-bee1-6be07e0d72dc" path="/var/lib/kubelet/pods/1004a56f-792b-4dae-bee1-6be07e0d72dc/volumes" Dec 05 12:40:29 crc kubenswrapper[4711]: I1205 12:40:29.115083 4711 scope.go:117] "RemoveContainer" containerID="103600391a03d143244c4b72b376da853989a793ca2842d0221c71aea76bff84" Dec 05 12:40:29 crc kubenswrapper[4711]: I1205 12:40:29.164571 4711 scope.go:117] "RemoveContainer" containerID="e4a2d27b45a182feaf0dd75f0135bed588d1ad8b2e82f9ab5336fe28965b68a9" Dec 05 12:40:29 crc kubenswrapper[4711]: I1205 12:40:29.223559 4711 scope.go:117] "RemoveContainer" containerID="5c128be0ec38b635947cacf350dd5e9459e6140109ccd5dfd0313d35a484e8a9" Dec 05 12:40:39 crc kubenswrapper[4711]: I1205 12:40:39.033205 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-7sl8r"] Dec 05 12:40:39 crc kubenswrapper[4711]: I1205 12:40:39.043950 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-7sl8r"] Dec 05 12:40:40 crc kubenswrapper[4711]: I1205 12:40:40.697047 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89800be3-c463-4e1e-b92a-abb613b5bf5e" path="/var/lib/kubelet/pods/89800be3-c463-4e1e-b92a-abb613b5bf5e/volumes" Dec 05 12:40:45 crc kubenswrapper[4711]: I1205 12:40:45.047961 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-sm4bg"] Dec 05 12:40:45 crc kubenswrapper[4711]: I1205 12:40:45.062190 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-sm4bg"] Dec 05 12:40:46 crc kubenswrapper[4711]: I1205 12:40:46.715377 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddff9a4d-a020-4de4-a114-694bec9908f9" path="/var/lib/kubelet/pods/ddff9a4d-a020-4de4-a114-694bec9908f9/volumes" Dec 05 12:40:48 crc kubenswrapper[4711]: I1205 12:40:48.029909 4711 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/cinder-db-sync-j6vdf"] Dec 05 12:40:48 crc kubenswrapper[4711]: I1205 12:40:48.039969 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-j6vdf"] Dec 05 12:40:48 crc kubenswrapper[4711]: I1205 12:40:48.694919 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da5d7bb4-71d2-458f-aabf-4cb2ed2f4661" path="/var/lib/kubelet/pods/da5d7bb4-71d2-458f-aabf-4cb2ed2f4661/volumes" Dec 05 12:40:52 crc kubenswrapper[4711]: I1205 12:40:52.026591 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-5gbrt"] Dec 05 12:40:52 crc kubenswrapper[4711]: I1205 12:40:52.036200 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-5gbrt"] Dec 05 12:40:52 crc kubenswrapper[4711]: I1205 12:40:52.693252 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e7e345d-f9d1-4c96-9da9-b960d54c7b5b" path="/var/lib/kubelet/pods/5e7e345d-f9d1-4c96-9da9-b960d54c7b5b/volumes" Dec 05 12:41:19 crc kubenswrapper[4711]: I1205 12:41:19.899346 4711 generic.go:334] "Generic (PLEG): container finished" podID="35f05c7f-bc76-4f95-8b4e-dc37a544c8e0" containerID="4a849810fd008bd891b322f34f604a9557735330ddaea67941aab63f47413f68" exitCode=0 Dec 05 12:41:19 crc kubenswrapper[4711]: I1205 12:41:19.899424 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" event={"ID":"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0","Type":"ContainerDied","Data":"4a849810fd008bd891b322f34f604a9557735330ddaea67941aab63f47413f68"} Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.351234 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.488119 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-ssh-key\") pod \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.488232 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-inventory\") pod \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.489138 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65xgb\" (UniqueName: \"kubernetes.io/projected/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-kube-api-access-65xgb\") pod \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\" (UID: \"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0\") " Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.497280 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-kube-api-access-65xgb" (OuterVolumeSpecName: "kube-api-access-65xgb") pod "35f05c7f-bc76-4f95-8b4e-dc37a544c8e0" (UID: "35f05c7f-bc76-4f95-8b4e-dc37a544c8e0"). InnerVolumeSpecName "kube-api-access-65xgb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.526115 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-inventory" (OuterVolumeSpecName: "inventory") pod "35f05c7f-bc76-4f95-8b4e-dc37a544c8e0" (UID: "35f05c7f-bc76-4f95-8b4e-dc37a544c8e0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.527996 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "35f05c7f-bc76-4f95-8b4e-dc37a544c8e0" (UID: "35f05c7f-bc76-4f95-8b4e-dc37a544c8e0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.592053 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.592601 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.592617 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65xgb\" (UniqueName: \"kubernetes.io/projected/35f05c7f-bc76-4f95-8b4e-dc37a544c8e0-kube-api-access-65xgb\") on node \"crc\" DevicePath \"\"" Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.920876 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" event={"ID":"35f05c7f-bc76-4f95-8b4e-dc37a544c8e0","Type":"ContainerDied","Data":"a43d2b71444fff4e2f079f34f70ab050d6bae249992f5c76c2dca747017e7e92"} Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.920925 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a43d2b71444fff4e2f079f34f70ab050d6bae249992f5c76c2dca747017e7e92" Dec 05 12:41:21 crc kubenswrapper[4711]: I1205 12:41:21.920986 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.018036 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc"] Dec 05 12:41:22 crc kubenswrapper[4711]: E1205 12:41:22.018486 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f05c7f-bc76-4f95-8b4e-dc37a544c8e0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.018504 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f05c7f-bc76-4f95-8b4e-dc37a544c8e0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.018694 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f05c7f-bc76-4f95-8b4e-dc37a544c8e0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.019630 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.022119 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.022474 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.022845 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.025125 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.032566 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc"] Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.103234 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.103602 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltxnk\" (UniqueName: \"kubernetes.io/projected/cc20675d-9302-43d3-8faf-74a8bbd8f752-kube-api-access-ltxnk\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.103779 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.205864 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.205933 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.206042 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltxnk\" (UniqueName: \"kubernetes.io/projected/cc20675d-9302-43d3-8faf-74a8bbd8f752-kube-api-access-ltxnk\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.212568 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.216238 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.223527 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltxnk\" (UniqueName: \"kubernetes.io/projected/cc20675d-9302-43d3-8faf-74a8bbd8f752-kube-api-access-ltxnk\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-72hkc\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.359735 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:41:22 crc kubenswrapper[4711]: I1205 12:41:22.962282 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc"] Dec 05 12:41:23 crc kubenswrapper[4711]: I1205 12:41:23.960179 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" event={"ID":"cc20675d-9302-43d3-8faf-74a8bbd8f752","Type":"ContainerStarted","Data":"9cfa74085473be6062dc66a1960f7f8b29239fb21d749d13044eb5c6985ce641"} Dec 05 12:41:23 crc kubenswrapper[4711]: I1205 12:41:23.960524 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" event={"ID":"cc20675d-9302-43d3-8faf-74a8bbd8f752","Type":"ContainerStarted","Data":"307ce6bd0f106809ec9379fea483f4f2949431a761881967331fb6a1f0ef87b4"} Dec 05 12:41:23 crc kubenswrapper[4711]: I1205 12:41:23.977550 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" podStartSLOduration=2.566182662 podStartE2EDuration="2.977528147s" podCreationTimestamp="2025-12-05 12:41:21 +0000 UTC" firstStartedPulling="2025-12-05 12:41:22.946402037 +0000 UTC m=+1928.530724367" lastFinishedPulling="2025-12-05 12:41:23.357747532 +0000 UTC m=+1928.942069852" observedRunningTime="2025-12-05 12:41:23.977175359 +0000 UTC m=+1929.561497709" watchObservedRunningTime="2025-12-05 12:41:23.977528147 +0000 UTC m=+1929.561850477" Dec 05 12:41:29 crc kubenswrapper[4711]: I1205 12:41:29.350016 4711 scope.go:117] "RemoveContainer" containerID="039647fb3cedc13b9bd16efffa1e1a5553e27cbcbadb9b7a8b3340cbd5e09621" Dec 05 12:41:29 crc kubenswrapper[4711]: I1205 12:41:29.396828 4711 scope.go:117] "RemoveContainer" 
containerID="74140c15b9c50e00a2dc82b3d4b1074606e814fe48324d3bb6a5dc41c13ddcf4" Dec 05 12:41:29 crc kubenswrapper[4711]: I1205 12:41:29.459037 4711 scope.go:117] "RemoveContainer" containerID="6930b6c7d4ac8c11c92e437f563aa3ea5fa4d8ffbb9a75e0aab2327b4cdd0aa2" Dec 05 12:41:29 crc kubenswrapper[4711]: I1205 12:41:29.512202 4711 scope.go:117] "RemoveContainer" containerID="c3d7238ac1459cfd25423978aa6be3db4a7dc0de917f4e2c4b2f8ff6306c34a5" Dec 05 12:41:38 crc kubenswrapper[4711]: I1205 12:41:38.044311 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-kmnxp"] Dec 05 12:41:38 crc kubenswrapper[4711]: I1205 12:41:38.052493 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-kmnxp"] Dec 05 12:41:38 crc kubenswrapper[4711]: I1205 12:41:38.697680 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="250ba9fe-e717-439d-9616-857bc376a77e" path="/var/lib/kubelet/pods/250ba9fe-e717-439d-9616-857bc376a77e/volumes" Dec 05 12:41:39 crc kubenswrapper[4711]: I1205 12:41:39.035042 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-lmvv6"] Dec 05 12:41:39 crc kubenswrapper[4711]: I1205 12:41:39.044195 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-3e3b-account-create-update-tv8bt"] Dec 05 12:41:39 crc kubenswrapper[4711]: I1205 12:41:39.053583 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-c164-account-create-update-pd8cn"] Dec 05 12:41:39 crc kubenswrapper[4711]: I1205 12:41:39.061941 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-lmvv6"] Dec 05 12:41:39 crc kubenswrapper[4711]: I1205 12:41:39.069765 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-3e3b-account-create-update-tv8bt"] Dec 05 12:41:39 crc kubenswrapper[4711]: I1205 12:41:39.076786 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-c164-account-create-update-pd8cn"] Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.027147 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-e1ed-account-create-update-tc6t7"] Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.037798 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-9rrzg"] Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.047303 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-e1ed-account-create-update-tc6t7"] Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.055532 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-9rrzg"] Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.695786 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09675ca4-9d45-4a8f-b26a-74aa1f3410e9" path="/var/lib/kubelet/pods/09675ca4-9d45-4a8f-b26a-74aa1f3410e9/volumes" Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.696448 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ad4bc45-298a-4ac6-877f-3022b22780cb" path="/var/lib/kubelet/pods/6ad4bc45-298a-4ac6-877f-3022b22780cb/volumes" Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.697061 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7aa9010c-513f-407e-add2-37b2f0399865" path="/var/lib/kubelet/pods/7aa9010c-513f-407e-add2-37b2f0399865/volumes" Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.697657 4711 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee061325-4a4d-400a-91fb-9b7d2bf3b383" path="/var/lib/kubelet/pods/ee061325-4a4d-400a-91fb-9b7d2bf3b383/volumes" Dec 05 12:41:40 crc kubenswrapper[4711]: I1205 12:41:40.698870 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f68d680a-2f6f-4227-a356-1005fa0084b6" path="/var/lib/kubelet/pods/f68d680a-2f6f-4227-a356-1005fa0084b6/volumes" Dec 05 12:42:00 crc kubenswrapper[4711]: I1205 12:42:00.748816 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-mdk4g" podUID="f6b616b6-23f5-4671-8d91-cc11317f07a6" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.77:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:42:00 crc kubenswrapper[4711]: I1205 12:42:00.813611 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-4nlqw" podUID="61d6cab9-9cd8-443b-ba0f-90de0670366b" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.80:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:42:15 crc kubenswrapper[4711]: I1205 12:42:15.045484 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k9dtz"] Dec 05 12:42:15 crc kubenswrapper[4711]: I1205 12:42:15.057042 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k9dtz"] Dec 05 12:42:16 crc kubenswrapper[4711]: I1205 12:42:16.693437 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8" path="/var/lib/kubelet/pods/6feb7c1f-2d77-4b1a-8c35-1b7afb48a8c8/volumes" Dec 05 12:42:18 crc kubenswrapper[4711]: I1205 12:42:18.300635 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:42:18 crc kubenswrapper[4711]: I1205 12:42:18.301662 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:42:29 crc kubenswrapper[4711]: I1205 12:42:29.657685 4711 scope.go:117] "RemoveContainer" containerID="3a8802f0d43c02de1793626e29f0e747e8a9ff352817d51546fe0cca8f683804" Dec 05 12:42:29 crc kubenswrapper[4711]: I1205 12:42:29.684759 4711 scope.go:117] "RemoveContainer" containerID="5de956d357f566a1ea7a690fd999f7c088c3eb2aeddf92d294ff98ed175a10cc" Dec 05 12:42:29 crc kubenswrapper[4711]: I1205 12:42:29.756833 4711 scope.go:117] "RemoveContainer" containerID="fc6ffb95647d2dd3c2afc5a21057a5b59271165f14a79d0162e5af29cf89e81f" Dec 05 12:42:29 crc kubenswrapper[4711]: I1205 12:42:29.793921 4711 scope.go:117] "RemoveContainer" containerID="fdaaf6a4a5793004b5f54a98b5c326a3baea5a2cafa3509ff5782ce334d48f36" Dec 05 12:42:29 crc kubenswrapper[4711]: I1205 12:42:29.852662 4711 scope.go:117] "RemoveContainer" containerID="a5ca090c39c1f25b306924e19653964389568ec2f929b9fe6b9084d0518e9181" Dec 05 12:42:29 crc kubenswrapper[4711]: I1205 12:42:29.887613 
4711 scope.go:117] "RemoveContainer" containerID="324c860ebe5858829a4c7bfa335ee69c77f7de48e37de40d0f850ac730609138" Dec 05 12:42:29 crc kubenswrapper[4711]: I1205 12:42:29.931631 4711 scope.go:117] "RemoveContainer" containerID="df160c490f72fbe482eebaaad2345427901a9b41395aa3bdb0b7882db9767f05" Dec 05 12:42:39 crc kubenswrapper[4711]: I1205 12:42:39.035640 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-2hnmx"] Dec 05 12:42:39 crc kubenswrapper[4711]: I1205 12:42:39.047744 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-2hnmx"] Dec 05 12:42:40 crc kubenswrapper[4711]: I1205 12:42:40.695645 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5592302-99d4-4e3c-948b-b9bccef58e6f" path="/var/lib/kubelet/pods/a5592302-99d4-4e3c-948b-b9bccef58e6f/volumes" Dec 05 12:42:44 crc kubenswrapper[4711]: I1205 12:42:44.595537 4711 generic.go:334] "Generic (PLEG): container finished" podID="cc20675d-9302-43d3-8faf-74a8bbd8f752" containerID="9cfa74085473be6062dc66a1960f7f8b29239fb21d749d13044eb5c6985ce641" exitCode=0 Dec 05 12:42:44 crc kubenswrapper[4711]: I1205 12:42:44.596037 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" event={"ID":"cc20675d-9302-43d3-8faf-74a8bbd8f752","Type":"ContainerDied","Data":"9cfa74085473be6062dc66a1960f7f8b29239fb21d749d13044eb5c6985ce641"} Dec 05 12:42:45 crc kubenswrapper[4711]: I1205 12:42:45.040565 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sgvbg"] Dec 05 12:42:45 crc kubenswrapper[4711]: I1205 12:42:45.050626 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sgvbg"] Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.011947 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.072561 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-ssh-key\") pod \"cc20675d-9302-43d3-8faf-74a8bbd8f752\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.072671 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltxnk\" (UniqueName: \"kubernetes.io/projected/cc20675d-9302-43d3-8faf-74a8bbd8f752-kube-api-access-ltxnk\") pod \"cc20675d-9302-43d3-8faf-74a8bbd8f752\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.072725 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-inventory\") pod \"cc20675d-9302-43d3-8faf-74a8bbd8f752\" (UID: \"cc20675d-9302-43d3-8faf-74a8bbd8f752\") " Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.077636 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc20675d-9302-43d3-8faf-74a8bbd8f752-kube-api-access-ltxnk" (OuterVolumeSpecName: "kube-api-access-ltxnk") pod "cc20675d-9302-43d3-8faf-74a8bbd8f752" (UID: "cc20675d-9302-43d3-8faf-74a8bbd8f752"). InnerVolumeSpecName "kube-api-access-ltxnk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.100291 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-inventory" (OuterVolumeSpecName: "inventory") pod "cc20675d-9302-43d3-8faf-74a8bbd8f752" (UID: "cc20675d-9302-43d3-8faf-74a8bbd8f752"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.100698 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cc20675d-9302-43d3-8faf-74a8bbd8f752" (UID: "cc20675d-9302-43d3-8faf-74a8bbd8f752"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.175168 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltxnk\" (UniqueName: \"kubernetes.io/projected/cc20675d-9302-43d3-8faf-74a8bbd8f752-kube-api-access-ltxnk\") on node \"crc\" DevicePath \"\"" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.175217 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.175226 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc20675d-9302-43d3-8faf-74a8bbd8f752-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.617354 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" event={"ID":"cc20675d-9302-43d3-8faf-74a8bbd8f752","Type":"ContainerDied","Data":"307ce6bd0f106809ec9379fea483f4f2949431a761881967331fb6a1f0ef87b4"} Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.617419 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="307ce6bd0f106809ec9379fea483f4f2949431a761881967331fb6a1f0ef87b4" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.617479 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-72hkc" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.696748 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="001b7802-4a74-4409-8667-128892193313" path="/var/lib/kubelet/pods/001b7802-4a74-4409-8667-128892193313/volumes" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.701837 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv"] Dec 05 12:42:46 crc kubenswrapper[4711]: E1205 12:42:46.702580 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc20675d-9302-43d3-8faf-74a8bbd8f752" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.702659 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc20675d-9302-43d3-8faf-74a8bbd8f752" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.703001 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc20675d-9302-43d3-8faf-74a8bbd8f752" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.703893 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.706127 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.706603 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.706668 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.706764 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.718526 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv"] Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.785656 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7hqt\" (UniqueName: \"kubernetes.io/projected/e56926d2-8bf7-4142-91eb-470bd969cbd3-kube-api-access-s7hqt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.785894 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.785999 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.888168 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.888294 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.888374 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7hqt\" (UniqueName: \"kubernetes.io/projected/e56926d2-8bf7-4142-91eb-470bd969cbd3-kube-api-access-s7hqt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.892660 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.892978 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:46 crc kubenswrapper[4711]: I1205 12:42:46.915369 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7hqt\" (UniqueName: \"kubernetes.io/projected/e56926d2-8bf7-4142-91eb-470bd969cbd3-kube-api-access-s7hqt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j82gv\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:47 crc kubenswrapper[4711]: I1205 12:42:47.037648 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:47 crc kubenswrapper[4711]: I1205 12:42:47.544655 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv"] Dec 05 12:42:47 crc kubenswrapper[4711]: I1205 12:42:47.627691 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" event={"ID":"e56926d2-8bf7-4142-91eb-470bd969cbd3","Type":"ContainerStarted","Data":"65dfc2e69dca919ce5036177c1c83323e4e42e68d4e7a4c3ebf97c1e03fffbe8"} Dec 05 12:42:48 crc kubenswrapper[4711]: I1205 12:42:48.300470 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:42:48 crc kubenswrapper[4711]: I1205 12:42:48.300865 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:42:48 crc kubenswrapper[4711]: I1205 12:42:48.640659 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" event={"ID":"e56926d2-8bf7-4142-91eb-470bd969cbd3","Type":"ContainerStarted","Data":"ca151c8e23c23ff62b288f3efaba9722f5cf3a0f72296dfe0edc4e3d1f9e4545"} Dec 05 12:42:48 crc kubenswrapper[4711]: I1205 12:42:48.660464 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" podStartSLOduration=2.192810093 podStartE2EDuration="2.66043923s" podCreationTimestamp="2025-12-05 12:42:46 +0000 UTC" firstStartedPulling="2025-12-05 12:42:47.549166248 +0000 UTC m=+2013.133488578" lastFinishedPulling="2025-12-05 12:42:48.016795385 +0000 UTC m=+2013.601117715" observedRunningTime="2025-12-05 12:42:48.654797782 +0000 UTC m=+2014.239120192" watchObservedRunningTime="2025-12-05 12:42:48.66043923 +0000 UTC m=+2014.244761560" Dec 05 12:42:53 crc kubenswrapper[4711]: I1205 12:42:53.685741 4711 generic.go:334] "Generic (PLEG): container finished" podID="e56926d2-8bf7-4142-91eb-470bd969cbd3" containerID="ca151c8e23c23ff62b288f3efaba9722f5cf3a0f72296dfe0edc4e3d1f9e4545" exitCode=0 Dec 05 12:42:53 crc kubenswrapper[4711]: I1205 12:42:53.687260 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" event={"ID":"e56926d2-8bf7-4142-91eb-470bd969cbd3","Type":"ContainerDied","Data":"ca151c8e23c23ff62b288f3efaba9722f5cf3a0f72296dfe0edc4e3d1f9e4545"} Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.089761 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.156515 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-ssh-key\") pod \"e56926d2-8bf7-4142-91eb-470bd969cbd3\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.156787 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-inventory\") pod \"e56926d2-8bf7-4142-91eb-470bd969cbd3\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.156869 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7hqt\" (UniqueName: \"kubernetes.io/projected/e56926d2-8bf7-4142-91eb-470bd969cbd3-kube-api-access-s7hqt\") pod \"e56926d2-8bf7-4142-91eb-470bd969cbd3\" (UID: \"e56926d2-8bf7-4142-91eb-470bd969cbd3\") " Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.163287 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e56926d2-8bf7-4142-91eb-470bd969cbd3-kube-api-access-s7hqt" (OuterVolumeSpecName: "kube-api-access-s7hqt") pod "e56926d2-8bf7-4142-91eb-470bd969cbd3" (UID: "e56926d2-8bf7-4142-91eb-470bd969cbd3"). InnerVolumeSpecName "kube-api-access-s7hqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.185524 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-inventory" (OuterVolumeSpecName: "inventory") pod "e56926d2-8bf7-4142-91eb-470bd969cbd3" (UID: "e56926d2-8bf7-4142-91eb-470bd969cbd3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.195127 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e56926d2-8bf7-4142-91eb-470bd969cbd3" (UID: "e56926d2-8bf7-4142-91eb-470bd969cbd3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.260012 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.260054 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e56926d2-8bf7-4142-91eb-470bd969cbd3-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.260070 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7hqt\" (UniqueName: \"kubernetes.io/projected/e56926d2-8bf7-4142-91eb-470bd969cbd3-kube-api-access-s7hqt\") on node \"crc\" DevicePath \"\"" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.703820 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" event={"ID":"e56926d2-8bf7-4142-91eb-470bd969cbd3","Type":"ContainerDied","Data":"65dfc2e69dca919ce5036177c1c83323e4e42e68d4e7a4c3ebf97c1e03fffbe8"} Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.704213 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65dfc2e69dca919ce5036177c1c83323e4e42e68d4e7a4c3ebf97c1e03fffbe8" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.704112 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j82gv" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.795886 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt"] Dec 05 12:42:55 crc kubenswrapper[4711]: E1205 12:42:55.796483 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56926d2-8bf7-4142-91eb-470bd969cbd3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.796504 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e56926d2-8bf7-4142-91eb-470bd969cbd3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.796710 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e56926d2-8bf7-4142-91eb-470bd969cbd3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.797497 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.799731 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.799905 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.801955 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.802192 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.823778 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt"] Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.873023 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.873144 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.873202 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bdqn\" (UniqueName: \"kubernetes.io/projected/f8d11d11-1dd3-475f-8715-54ce9afc0c18-kube-api-access-8bdqn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.976366 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.976485 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.976527 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bdqn\" (UniqueName: \"kubernetes.io/projected/f8d11d11-1dd3-475f-8715-54ce9afc0c18-kube-api-access-8bdqn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: 
\"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.981851 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.982072 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:55 crc kubenswrapper[4711]: I1205 12:42:55.999673 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bdqn\" (UniqueName: \"kubernetes.io/projected/f8d11d11-1dd3-475f-8715-54ce9afc0c18-kube-api-access-8bdqn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-phtdt\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:56 crc kubenswrapper[4711]: I1205 12:42:56.135435 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:42:56 crc kubenswrapper[4711]: I1205 12:42:56.670954 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt"] Dec 05 12:42:56 crc kubenswrapper[4711]: W1205 12:42:56.673457 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8d11d11_1dd3_475f_8715_54ce9afc0c18.slice/crio-6ab7ef7c62c32ddafe90921957814949484bf49e98d5fe4f52dbd564515f3d21 WatchSource:0}: Error finding container 6ab7ef7c62c32ddafe90921957814949484bf49e98d5fe4f52dbd564515f3d21: Status 404 returned error can't find the container with id 6ab7ef7c62c32ddafe90921957814949484bf49e98d5fe4f52dbd564515f3d21 Dec 05 12:42:56 crc kubenswrapper[4711]: I1205 12:42:56.720141 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" event={"ID":"f8d11d11-1dd3-475f-8715-54ce9afc0c18","Type":"ContainerStarted","Data":"6ab7ef7c62c32ddafe90921957814949484bf49e98d5fe4f52dbd564515f3d21"} Dec 05 12:42:57 crc kubenswrapper[4711]: I1205 12:42:57.736889 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" event={"ID":"f8d11d11-1dd3-475f-8715-54ce9afc0c18","Type":"ContainerStarted","Data":"e824de73c02f9994a0d3270b647aa8fa2d9bc7c5c19153cdaf2beba9e36a9093"} Dec 05 12:42:57 crc kubenswrapper[4711]: I1205 12:42:57.759596 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" podStartSLOduration=2.358019625 podStartE2EDuration="2.759574951s" podCreationTimestamp="2025-12-05 12:42:55 +0000 UTC" firstStartedPulling="2025-12-05 12:42:56.67585368 +0000 UTC m=+2022.260176010" lastFinishedPulling="2025-12-05 12:42:57.077408996 +0000 UTC m=+2022.661731336" observedRunningTime="2025-12-05 12:42:57.753606896 +0000 UTC 
m=+2023.337929226" watchObservedRunningTime="2025-12-05 12:42:57.759574951 +0000 UTC m=+2023.343897281" Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.300948 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.301579 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.301654 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.302494 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0154cbb3ce8d498f21d6fffddd10e58f73c3ea89753011e091ff41fc17837fe9"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.302558 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://0154cbb3ce8d498f21d6fffddd10e58f73c3ea89753011e091ff41fc17837fe9" gracePeriod=600 Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.933948 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="0154cbb3ce8d498f21d6fffddd10e58f73c3ea89753011e091ff41fc17837fe9" exitCode=0 Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.934004 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"0154cbb3ce8d498f21d6fffddd10e58f73c3ea89753011e091ff41fc17837fe9"} Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.934346 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"} Dec 05 12:43:18 crc kubenswrapper[4711]: I1205 12:43:18.934370 4711 scope.go:117] "RemoveContainer" containerID="6cc8fd9fb9de3af38cb24fef618f6c87c6c63f2b74bd7477d26361d8885d64d0" Dec 05 12:43:25 crc kubenswrapper[4711]: I1205 12:43:25.046105 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-qwvs4"] Dec 05 12:43:25 crc kubenswrapper[4711]: I1205 12:43:25.056914 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-qwvs4"] Dec 05 12:43:26 crc kubenswrapper[4711]: I1205 12:43:26.694915 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4927b347-0a86-4ab8-8a48-345cd36637b4" 
path="/var/lib/kubelet/pods/4927b347-0a86-4ab8-8a48-345cd36637b4/volumes" Dec 05 12:43:30 crc kubenswrapper[4711]: I1205 12:43:30.059825 4711 scope.go:117] "RemoveContainer" containerID="8a5a8f0fde0ae46fb1b38fa8e549a85e2558f601d06143ff46f4fddb87e7870e" Dec 05 12:43:30 crc kubenswrapper[4711]: I1205 12:43:30.103514 4711 scope.go:117] "RemoveContainer" containerID="dbe85db4d6cff17470dc1fe98399ec484e8773d53f5545c66cae3285282655a0" Dec 05 12:43:30 crc kubenswrapper[4711]: I1205 12:43:30.174214 4711 scope.go:117] "RemoveContainer" containerID="2bdfb993dfe4a10d9fbe22bbb422bd944d224d3a39975fe62edca57dd4e16fc2" Dec 05 12:43:39 crc kubenswrapper[4711]: I1205 12:43:39.115524 4711 generic.go:334] "Generic (PLEG): container finished" podID="f8d11d11-1dd3-475f-8715-54ce9afc0c18" containerID="e824de73c02f9994a0d3270b647aa8fa2d9bc7c5c19153cdaf2beba9e36a9093" exitCode=0 Dec 05 12:43:39 crc kubenswrapper[4711]: I1205 12:43:39.115598 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" event={"ID":"f8d11d11-1dd3-475f-8715-54ce9afc0c18","Type":"ContainerDied","Data":"e824de73c02f9994a0d3270b647aa8fa2d9bc7c5c19153cdaf2beba9e36a9093"} Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.581942 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.722100 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-ssh-key\") pod \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.722416 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-inventory\") pod \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.722495 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bdqn\" (UniqueName: \"kubernetes.io/projected/f8d11d11-1dd3-475f-8715-54ce9afc0c18-kube-api-access-8bdqn\") pod \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\" (UID: \"f8d11d11-1dd3-475f-8715-54ce9afc0c18\") " Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.736539 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8d11d11-1dd3-475f-8715-54ce9afc0c18-kube-api-access-8bdqn" (OuterVolumeSpecName: "kube-api-access-8bdqn") pod "f8d11d11-1dd3-475f-8715-54ce9afc0c18" (UID: "f8d11d11-1dd3-475f-8715-54ce9afc0c18"). InnerVolumeSpecName "kube-api-access-8bdqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.769049 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f8d11d11-1dd3-475f-8715-54ce9afc0c18" (UID: "f8d11d11-1dd3-475f-8715-54ce9afc0c18"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.770190 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-inventory" (OuterVolumeSpecName: "inventory") pod "f8d11d11-1dd3-475f-8715-54ce9afc0c18" (UID: "f8d11d11-1dd3-475f-8715-54ce9afc0c18"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.833830 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.833873 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8d11d11-1dd3-475f-8715-54ce9afc0c18-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:40 crc kubenswrapper[4711]: I1205 12:43:40.833885 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bdqn\" (UniqueName: \"kubernetes.io/projected/f8d11d11-1dd3-475f-8715-54ce9afc0c18-kube-api-access-8bdqn\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.145815 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" event={"ID":"f8d11d11-1dd3-475f-8715-54ce9afc0c18","Type":"ContainerDied","Data":"6ab7ef7c62c32ddafe90921957814949484bf49e98d5fe4f52dbd564515f3d21"} Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.145868 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ab7ef7c62c32ddafe90921957814949484bf49e98d5fe4f52dbd564515f3d21" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.145966 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-phtdt" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.224725 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv"] Dec 05 12:43:41 crc kubenswrapper[4711]: E1205 12:43:41.225122 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8d11d11-1dd3-475f-8715-54ce9afc0c18" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.225139 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8d11d11-1dd3-475f-8715-54ce9afc0c18" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.225334 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8d11d11-1dd3-475f-8715-54ce9afc0c18" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.225979 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.227971 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.228299 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.230952 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.233413 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.240364 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv"] Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.342935 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.343160 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.343255 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8cc9\" (UniqueName: \"kubernetes.io/projected/fa005b6f-fb79-4a1f-be0e-ec72a8680996-kube-api-access-z8cc9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.445411 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.445493 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.445561 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8cc9\" (UniqueName: \"kubernetes.io/projected/fa005b6f-fb79-4a1f-be0e-ec72a8680996-kube-api-access-z8cc9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" 
(UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.449398 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.449475 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.461372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8cc9\" (UniqueName: \"kubernetes.io/projected/fa005b6f-fb79-4a1f-be0e-ec72a8680996-kube-api-access-z8cc9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-w64bv\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:41 crc kubenswrapper[4711]: I1205 12:43:41.545642 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:43:42 crc kubenswrapper[4711]: I1205 12:43:42.076371 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:43:42 crc kubenswrapper[4711]: I1205 12:43:42.083264 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv"] Dec 05 12:43:42 crc kubenswrapper[4711]: I1205 12:43:42.155194 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" event={"ID":"fa005b6f-fb79-4a1f-be0e-ec72a8680996","Type":"ContainerStarted","Data":"af1b65863bb981b3f8be9dac62f76e9df2d3c381e42b2f1aa07bad4025a5eb8d"} Dec 05 12:43:43 crc kubenswrapper[4711]: I1205 12:43:43.164533 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" event={"ID":"fa005b6f-fb79-4a1f-be0e-ec72a8680996","Type":"ContainerStarted","Data":"4a7199b891aa4af759027e5b189ab59efbcfe6fb088c07b5328dc87f81b5d112"} Dec 05 12:43:43 crc kubenswrapper[4711]: I1205 12:43:43.178435 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" podStartSLOduration=1.465652504 podStartE2EDuration="2.178417835s" podCreationTimestamp="2025-12-05 12:43:41 +0000 UTC" firstStartedPulling="2025-12-05 12:43:42.076095911 +0000 UTC m=+2067.660418241" lastFinishedPulling="2025-12-05 12:43:42.788861242 +0000 UTC m=+2068.373183572" observedRunningTime="2025-12-05 12:43:43.175870713 +0000 UTC m=+2068.760193063" watchObservedRunningTime="2025-12-05 12:43:43.178417835 +0000 UTC m=+2068.762740165" Dec 05 12:44:34 crc kubenswrapper[4711]: I1205 12:44:34.614435 4711 generic.go:334] "Generic (PLEG): container finished" podID="fa005b6f-fb79-4a1f-be0e-ec72a8680996" containerID="4a7199b891aa4af759027e5b189ab59efbcfe6fb088c07b5328dc87f81b5d112" 
exitCode=0 Dec 05 12:44:34 crc kubenswrapper[4711]: I1205 12:44:34.614503 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" event={"ID":"fa005b6f-fb79-4a1f-be0e-ec72a8680996","Type":"ContainerDied","Data":"4a7199b891aa4af759027e5b189ab59efbcfe6fb088c07b5328dc87f81b5d112"} Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.060929 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.219235 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-ssh-key\") pod \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.219453 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-inventory\") pod \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.219475 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8cc9\" (UniqueName: \"kubernetes.io/projected/fa005b6f-fb79-4a1f-be0e-ec72a8680996-kube-api-access-z8cc9\") pod \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\" (UID: \"fa005b6f-fb79-4a1f-be0e-ec72a8680996\") " Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.224790 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa005b6f-fb79-4a1f-be0e-ec72a8680996-kube-api-access-z8cc9" (OuterVolumeSpecName: "kube-api-access-z8cc9") pod "fa005b6f-fb79-4a1f-be0e-ec72a8680996" (UID: "fa005b6f-fb79-4a1f-be0e-ec72a8680996"). InnerVolumeSpecName "kube-api-access-z8cc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.248103 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fa005b6f-fb79-4a1f-be0e-ec72a8680996" (UID: "fa005b6f-fb79-4a1f-be0e-ec72a8680996"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.253631 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-inventory" (OuterVolumeSpecName: "inventory") pod "fa005b6f-fb79-4a1f-be0e-ec72a8680996" (UID: "fa005b6f-fb79-4a1f-be0e-ec72a8680996"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.321780 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.321834 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8cc9\" (UniqueName: \"kubernetes.io/projected/fa005b6f-fb79-4a1f-be0e-ec72a8680996-kube-api-access-z8cc9\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.321847 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa005b6f-fb79-4a1f-be0e-ec72a8680996-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.647383 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" event={"ID":"fa005b6f-fb79-4a1f-be0e-ec72a8680996","Type":"ContainerDied","Data":"af1b65863bb981b3f8be9dac62f76e9df2d3c381e42b2f1aa07bad4025a5eb8d"} Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.647443 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af1b65863bb981b3f8be9dac62f76e9df2d3c381e42b2f1aa07bad4025a5eb8d" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.647479 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-w64bv" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.722239 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6kgdc"] Dec 05 12:44:36 crc kubenswrapper[4711]: E1205 12:44:36.722748 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa005b6f-fb79-4a1f-be0e-ec72a8680996" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.722771 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa005b6f-fb79-4a1f-be0e-ec72a8680996" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.723031 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa005b6f-fb79-4a1f-be0e-ec72a8680996" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.723916 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.726755 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.727035 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.727318 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.727542 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.745452 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6kgdc"] Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.830606 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf7k2\" (UniqueName: \"kubernetes.io/projected/80bc6810-0d4b-430b-b96a-6606cc41d7b9-kube-api-access-zf7k2\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.830901 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.831137 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.932808 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.932919 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf7k2\" (UniqueName: \"kubernetes.io/projected/80bc6810-0d4b-430b-b96a-6606cc41d7b9-kube-api-access-zf7k2\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.933046 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc 
kubenswrapper[4711]: I1205 12:44:36.941004 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.941051 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:36 crc kubenswrapper[4711]: I1205 12:44:36.951399 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf7k2\" (UniqueName: \"kubernetes.io/projected/80bc6810-0d4b-430b-b96a-6606cc41d7b9-kube-api-access-zf7k2\") pod \"ssh-known-hosts-edpm-deployment-6kgdc\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:37 crc kubenswrapper[4711]: I1205 12:44:37.042372 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:37 crc kubenswrapper[4711]: I1205 12:44:37.572488 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6kgdc"] Dec 05 12:44:37 crc kubenswrapper[4711]: W1205 12:44:37.580924 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80bc6810_0d4b_430b_b96a_6606cc41d7b9.slice/crio-1a67d019b2a57bb8078b06af8f0adc612e3b291c8a04f8eb349065b6ccc84de2 WatchSource:0}: Error finding container 1a67d019b2a57bb8078b06af8f0adc612e3b291c8a04f8eb349065b6ccc84de2: Status 404 returned error can't find the container with id 1a67d019b2a57bb8078b06af8f0adc612e3b291c8a04f8eb349065b6ccc84de2 Dec 05 12:44:37 crc kubenswrapper[4711]: I1205 12:44:37.658203 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" event={"ID":"80bc6810-0d4b-430b-b96a-6606cc41d7b9","Type":"ContainerStarted","Data":"1a67d019b2a57bb8078b06af8f0adc612e3b291c8a04f8eb349065b6ccc84de2"} Dec 05 12:44:38 crc kubenswrapper[4711]: I1205 12:44:38.672231 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" event={"ID":"80bc6810-0d4b-430b-b96a-6606cc41d7b9","Type":"ContainerStarted","Data":"342872047bc5b1b489341d3b18abe813330e1b35de734fce8270267fb9177661"} Dec 05 12:44:38 crc kubenswrapper[4711]: I1205 12:44:38.697300 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" podStartSLOduration=2.300564902 podStartE2EDuration="2.697276201s" podCreationTimestamp="2025-12-05 12:44:36 +0000 UTC" firstStartedPulling="2025-12-05 12:44:37.585470205 +0000 UTC m=+2123.169792535" lastFinishedPulling="2025-12-05 12:44:37.982181504 +0000 UTC m=+2123.566503834" observedRunningTime="2025-12-05 12:44:38.683840554 +0000 UTC m=+2124.268162884" watchObservedRunningTime="2025-12-05 12:44:38.697276201 +0000 UTC m=+2124.281598561" Dec 05 12:44:45 crc kubenswrapper[4711]: I1205 12:44:45.732899 4711 generic.go:334] "Generic (PLEG): container finished" 
podID="80bc6810-0d4b-430b-b96a-6606cc41d7b9" containerID="342872047bc5b1b489341d3b18abe813330e1b35de734fce8270267fb9177661" exitCode=0 Dec 05 12:44:45 crc kubenswrapper[4711]: I1205 12:44:45.732964 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" event={"ID":"80bc6810-0d4b-430b-b96a-6606cc41d7b9","Type":"ContainerDied","Data":"342872047bc5b1b489341d3b18abe813330e1b35de734fce8270267fb9177661"} Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.110216 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.234788 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-ssh-key-openstack-edpm-ipam\") pod \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.234889 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-inventory-0\") pod \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.234943 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf7k2\" (UniqueName: \"kubernetes.io/projected/80bc6810-0d4b-430b-b96a-6606cc41d7b9-kube-api-access-zf7k2\") pod \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\" (UID: \"80bc6810-0d4b-430b-b96a-6606cc41d7b9\") " Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.241030 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80bc6810-0d4b-430b-b96a-6606cc41d7b9-kube-api-access-zf7k2" (OuterVolumeSpecName: "kube-api-access-zf7k2") pod "80bc6810-0d4b-430b-b96a-6606cc41d7b9" (UID: "80bc6810-0d4b-430b-b96a-6606cc41d7b9"). InnerVolumeSpecName "kube-api-access-zf7k2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.265586 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "80bc6810-0d4b-430b-b96a-6606cc41d7b9" (UID: "80bc6810-0d4b-430b-b96a-6606cc41d7b9"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.269420 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "80bc6810-0d4b-430b-b96a-6606cc41d7b9" (UID: "80bc6810-0d4b-430b-b96a-6606cc41d7b9"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.337864 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.337918 4711 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/80bc6810-0d4b-430b-b96a-6606cc41d7b9-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.337931 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf7k2\" (UniqueName: \"kubernetes.io/projected/80bc6810-0d4b-430b-b96a-6606cc41d7b9-kube-api-access-zf7k2\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.804733 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" event={"ID":"80bc6810-0d4b-430b-b96a-6606cc41d7b9","Type":"ContainerDied","Data":"1a67d019b2a57bb8078b06af8f0adc612e3b291c8a04f8eb349065b6ccc84de2"} Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.804784 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a67d019b2a57bb8078b06af8f0adc612e3b291c8a04f8eb349065b6ccc84de2" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.804858 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6kgdc" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.877146 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz"] Dec 05 12:44:47 crc kubenswrapper[4711]: E1205 12:44:47.877947 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80bc6810-0d4b-430b-b96a-6606cc41d7b9" containerName="ssh-known-hosts-edpm-deployment" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.877968 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="80bc6810-0d4b-430b-b96a-6606cc41d7b9" containerName="ssh-known-hosts-edpm-deployment" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.878237 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="80bc6810-0d4b-430b-b96a-6606cc41d7b9" containerName="ssh-known-hosts-edpm-deployment" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.879114 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.881257 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.881474 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.882511 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.883434 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.895709 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz"] Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.950233 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.950320 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:47 crc kubenswrapper[4711]: I1205 12:44:47.950452 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfxjc\" (UniqueName: \"kubernetes.io/projected/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-kube-api-access-tfxjc\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.052094 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.052185 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.052234 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfxjc\" (UniqueName: \"kubernetes.io/projected/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-kube-api-access-tfxjc\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.064196 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.064952 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.071070 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfxjc\" (UniqueName: \"kubernetes.io/projected/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-kube-api-access-tfxjc\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n6cnz\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.198453 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.700586 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz"] Dec 05 12:44:48 crc kubenswrapper[4711]: I1205 12:44:48.817227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" event={"ID":"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc","Type":"ContainerStarted","Data":"5914088d46c6034b00b17f783e5080b3ef9486b2d31df7c28d5320269cdf3d0b"} Dec 05 12:44:49 crc kubenswrapper[4711]: I1205 12:44:49.838699 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" event={"ID":"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc","Type":"ContainerStarted","Data":"5c54793f77f831f1237589fbb2ac2441bed1d3e40f26be23f51697e113607ef2"} Dec 05 12:44:49 crc kubenswrapper[4711]: I1205 12:44:49.881723 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" podStartSLOduration=2.399379498 podStartE2EDuration="2.881699732s" podCreationTimestamp="2025-12-05 12:44:47 +0000 UTC" firstStartedPulling="2025-12-05 12:44:48.707763673 +0000 UTC m=+2134.292086003" lastFinishedPulling="2025-12-05 12:44:49.190083907 +0000 UTC m=+2134.774406237" observedRunningTime="2025-12-05 12:44:49.873119783 +0000 UTC m=+2135.457442123" watchObservedRunningTime="2025-12-05 12:44:49.881699732 +0000 UTC m=+2135.466022062" Dec 05 12:44:58 crc kubenswrapper[4711]: I1205 12:44:58.938226 4711 generic.go:334] "Generic (PLEG): container finished" podID="6f0b9185-75d5-4e86-9c18-211ea2a8f3dc" containerID="5c54793f77f831f1237589fbb2ac2441bed1d3e40f26be23f51697e113607ef2" exitCode=0 Dec 05 12:44:58 crc kubenswrapper[4711]: I1205 12:44:58.938314 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" 
event={"ID":"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc","Type":"ContainerDied","Data":"5c54793f77f831f1237589fbb2ac2441bed1d3e40f26be23f51697e113607ef2"} Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.154349 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c"] Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.157249 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.162635 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.162839 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.205289 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0a4475c-cc65-4438-98a0-fa898baa9ce0-secret-volume\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.205365 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b42jv\" (UniqueName: \"kubernetes.io/projected/f0a4475c-cc65-4438-98a0-fa898baa9ce0-kube-api-access-b42jv\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.205552 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0a4475c-cc65-4438-98a0-fa898baa9ce0-config-volume\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.307408 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0a4475c-cc65-4438-98a0-fa898baa9ce0-secret-volume\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.307680 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b42jv\" (UniqueName: \"kubernetes.io/projected/f0a4475c-cc65-4438-98a0-fa898baa9ce0-kube-api-access-b42jv\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.307818 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0a4475c-cc65-4438-98a0-fa898baa9ce0-config-volume\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.308831 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0a4475c-cc65-4438-98a0-fa898baa9ce0-config-volume\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.325754 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0a4475c-cc65-4438-98a0-fa898baa9ce0-secret-volume\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.326879 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b42jv\" (UniqueName: \"kubernetes.io/projected/f0a4475c-cc65-4438-98a0-fa898baa9ce0-kube-api-access-b42jv\") pod \"collect-profiles-29415645-5qh9c\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.376476 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c"] Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.477896 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.684044 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.818744 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfxjc\" (UniqueName: \"kubernetes.io/projected/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-kube-api-access-tfxjc\") pod \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.818844 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-ssh-key\") pod \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.819003 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-inventory\") pod \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\" (UID: \"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc\") " Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.825580 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-kube-api-access-tfxjc" (OuterVolumeSpecName: "kube-api-access-tfxjc") pod "6f0b9185-75d5-4e86-9c18-211ea2a8f3dc" (UID: "6f0b9185-75d5-4e86-9c18-211ea2a8f3dc"). InnerVolumeSpecName "kube-api-access-tfxjc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.846768 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6f0b9185-75d5-4e86-9c18-211ea2a8f3dc" (UID: "6f0b9185-75d5-4e86-9c18-211ea2a8f3dc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.848035 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-inventory" (OuterVolumeSpecName: "inventory") pod "6f0b9185-75d5-4e86-9c18-211ea2a8f3dc" (UID: "6f0b9185-75d5-4e86-9c18-211ea2a8f3dc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.922281 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfxjc\" (UniqueName: \"kubernetes.io/projected/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-kube-api-access-tfxjc\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.922338 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.922350 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f0b9185-75d5-4e86-9c18-211ea2a8f3dc-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.960841 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" event={"ID":"6f0b9185-75d5-4e86-9c18-211ea2a8f3dc","Type":"ContainerDied","Data":"5914088d46c6034b00b17f783e5080b3ef9486b2d31df7c28d5320269cdf3d0b"} Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.961193 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5914088d46c6034b00b17f783e5080b3ef9486b2d31df7c28d5320269cdf3d0b" Dec 05 12:45:00 crc kubenswrapper[4711]: I1205 12:45:00.960922 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n6cnz" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.020303 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c"] Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.072313 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r"] Dec 05 12:45:01 crc kubenswrapper[4711]: E1205 12:45:01.073044 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f0b9185-75d5-4e86-9c18-211ea2a8f3dc" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.073075 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f0b9185-75d5-4e86-9c18-211ea2a8f3dc" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.073733 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f0b9185-75d5-4e86-9c18-211ea2a8f3dc" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.080229 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.084198 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.084287 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.084365 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.084553 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.085115 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r"] Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.125991 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktwhw\" (UniqueName: \"kubernetes.io/projected/8bd300af-c4e6-4641-a598-0ab3af20c754-kube-api-access-ktwhw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.126094 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.126126 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " 
pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.227987 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktwhw\" (UniqueName: \"kubernetes.io/projected/8bd300af-c4e6-4641-a598-0ab3af20c754-kube-api-access-ktwhw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.228099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.228130 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.234007 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.234029 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.245946 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktwhw\" (UniqueName: \"kubernetes.io/projected/8bd300af-c4e6-4641-a598-0ab3af20c754-kube-api-access-ktwhw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.407572 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:01 crc kubenswrapper[4711]: W1205 12:45:01.932997 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bd300af_c4e6_4641_a598_0ab3af20c754.slice/crio-7758ad6cf41a0f61be4983c50d27b60d2cdfcf14d5b0586f4ee28a2b43c73379 WatchSource:0}: Error finding container 7758ad6cf41a0f61be4983c50d27b60d2cdfcf14d5b0586f4ee28a2b43c73379: Status 404 returned error can't find the container with id 7758ad6cf41a0f61be4983c50d27b60d2cdfcf14d5b0586f4ee28a2b43c73379 Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.943664 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r"] Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.973505 4711 generic.go:334] "Generic (PLEG): container finished" podID="f0a4475c-cc65-4438-98a0-fa898baa9ce0" containerID="ac4ef4bea8181282bb0b7a8f0cac7a0cfe7739d57bc2c2ff3efc358432121ec3" exitCode=0 Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.973606 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" event={"ID":"f0a4475c-cc65-4438-98a0-fa898baa9ce0","Type":"ContainerDied","Data":"ac4ef4bea8181282bb0b7a8f0cac7a0cfe7739d57bc2c2ff3efc358432121ec3"} Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.973633 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" event={"ID":"f0a4475c-cc65-4438-98a0-fa898baa9ce0","Type":"ContainerStarted","Data":"4266ba2f21c897fb4dee3d601f4f04d1597b6d482f3f46173f82acc3f9076096"} Dec 05 12:45:01 crc kubenswrapper[4711]: I1205 12:45:01.976530 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" event={"ID":"8bd300af-c4e6-4641-a598-0ab3af20c754","Type":"ContainerStarted","Data":"7758ad6cf41a0f61be4983c50d27b60d2cdfcf14d5b0586f4ee28a2b43c73379"} Dec 05 12:45:02 crc kubenswrapper[4711]: I1205 12:45:02.985516 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" event={"ID":"8bd300af-c4e6-4641-a598-0ab3af20c754","Type":"ContainerStarted","Data":"63af250ebb53acc26243dfca89f3b54d1566f1145617e9c349651742615e777f"} Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.011437 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" podStartSLOduration=1.601865164 podStartE2EDuration="2.011418101s" podCreationTimestamp="2025-12-05 12:45:01 +0000 UTC" firstStartedPulling="2025-12-05 12:45:01.936669975 +0000 UTC m=+2147.520992305" lastFinishedPulling="2025-12-05 12:45:02.346222872 +0000 UTC m=+2147.930545242" observedRunningTime="2025-12-05 12:45:03.000607576 +0000 UTC m=+2148.584929916" watchObservedRunningTime="2025-12-05 12:45:03.011418101 +0000 UTC m=+2148.595740431" Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.331094 4711 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.369562 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b42jv\" (UniqueName: \"kubernetes.io/projected/f0a4475c-cc65-4438-98a0-fa898baa9ce0-kube-api-access-b42jv\") pod \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") "
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.369692 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0a4475c-cc65-4438-98a0-fa898baa9ce0-config-volume\") pod \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") "
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.369760 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0a4475c-cc65-4438-98a0-fa898baa9ce0-secret-volume\") pod \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\" (UID: \"f0a4475c-cc65-4438-98a0-fa898baa9ce0\") "
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.371650 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0a4475c-cc65-4438-98a0-fa898baa9ce0-config-volume" (OuterVolumeSpecName: "config-volume") pod "f0a4475c-cc65-4438-98a0-fa898baa9ce0" (UID: "f0a4475c-cc65-4438-98a0-fa898baa9ce0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.376195 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0a4475c-cc65-4438-98a0-fa898baa9ce0-kube-api-access-b42jv" (OuterVolumeSpecName: "kube-api-access-b42jv") pod "f0a4475c-cc65-4438-98a0-fa898baa9ce0" (UID: "f0a4475c-cc65-4438-98a0-fa898baa9ce0"). InnerVolumeSpecName "kube-api-access-b42jv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.377781 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0a4475c-cc65-4438-98a0-fa898baa9ce0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f0a4475c-cc65-4438-98a0-fa898baa9ce0" (UID: "f0a4475c-cc65-4438-98a0-fa898baa9ce0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.473732 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b42jv\" (UniqueName: \"kubernetes.io/projected/f0a4475c-cc65-4438-98a0-fa898baa9ce0-kube-api-access-b42jv\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.474072 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0a4475c-cc65-4438-98a0-fa898baa9ce0-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:03 crc kubenswrapper[4711]: I1205 12:45:03.474087 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0a4475c-cc65-4438-98a0-fa898baa9ce0-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4711]: I1205 12:45:04.008234 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c"
Dec 05 12:45:04 crc kubenswrapper[4711]: I1205 12:45:04.008231 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c" event={"ID":"f0a4475c-cc65-4438-98a0-fa898baa9ce0","Type":"ContainerDied","Data":"4266ba2f21c897fb4dee3d601f4f04d1597b6d482f3f46173f82acc3f9076096"}
Dec 05 12:45:04 crc kubenswrapper[4711]: I1205 12:45:04.008366 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4266ba2f21c897fb4dee3d601f4f04d1597b6d482f3f46173f82acc3f9076096"
Dec 05 12:45:04 crc kubenswrapper[4711]: I1205 12:45:04.408543 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"]
Dec 05 12:45:04 crc kubenswrapper[4711]: I1205 12:45:04.418209 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-csfhl"]
Dec 05 12:45:04 crc kubenswrapper[4711]: I1205 12:45:04.697120 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="564b439c-0ac6-43d5-afa8-2379ea73a71e" path="/var/lib/kubelet/pods/564b439c-0ac6-43d5-afa8-2379ea73a71e/volumes"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.468868 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b6k9h"]
Dec 05 12:45:06 crc kubenswrapper[4711]: E1205 12:45:06.469329 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0a4475c-cc65-4438-98a0-fa898baa9ce0" containerName="collect-profiles"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.469343 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0a4475c-cc65-4438-98a0-fa898baa9ce0" containerName="collect-profiles"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.469693 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0a4475c-cc65-4438-98a0-fa898baa9ce0" containerName="collect-profiles"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.471470 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.485734 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b6k9h"]
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.531112 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-utilities\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.531526 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc5vh\" (UniqueName: \"kubernetes.io/projected/aa29719c-44d1-4d90-9307-c59520df44fa-kube-api-access-wc5vh\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.531584 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-catalog-content\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.633330 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc5vh\" (UniqueName: \"kubernetes.io/projected/aa29719c-44d1-4d90-9307-c59520df44fa-kube-api-access-wc5vh\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.633378 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-catalog-content\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.633464 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-utilities\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.633909 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-utilities\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.634203 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-catalog-content\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.652782 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc5vh\" (UniqueName: \"kubernetes.io/projected/aa29719c-44d1-4d90-9307-c59520df44fa-kube-api-access-wc5vh\") pod \"redhat-marketplace-b6k9h\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:06 crc kubenswrapper[4711]: I1205 12:45:06.789883 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b6k9h"
Dec 05 12:45:07 crc kubenswrapper[4711]: I1205 12:45:07.253760 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b6k9h"]
Dec 05 12:45:08 crc kubenswrapper[4711]: I1205 12:45:08.048524 4711 generic.go:334] "Generic (PLEG): container finished" podID="aa29719c-44d1-4d90-9307-c59520df44fa" containerID="e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80" exitCode=0
Dec 05 12:45:08 crc kubenswrapper[4711]: I1205 12:45:08.048631 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b6k9h" event={"ID":"aa29719c-44d1-4d90-9307-c59520df44fa","Type":"ContainerDied","Data":"e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80"}
Dec 05 12:45:08 crc kubenswrapper[4711]: I1205 12:45:08.048857 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b6k9h" event={"ID":"aa29719c-44d1-4d90-9307-c59520df44fa","Type":"ContainerStarted","Data":"8c82fb8c515e85721048a6e8723f75bda63293a013770b71d5f88c8b0f423b98"}
Dec 05 12:45:09 crc kubenswrapper[4711]: I1205 12:45:09.063979 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b6k9h" event={"ID":"aa29719c-44d1-4d90-9307-c59520df44fa","Type":"ContainerStarted","Data":"0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65"}
Dec 05 12:45:10 crc kubenswrapper[4711]: I1205 12:45:10.078472 4711 generic.go:334] "Generic (PLEG): container finished" podID="aa29719c-44d1-4d90-9307-c59520df44fa" containerID="0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65" exitCode=0
Dec 05 12:45:10 crc kubenswrapper[4711]: I1205 12:45:10.078525 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b6k9h" event={"ID":"aa29719c-44d1-4d90-9307-c59520df44fa","Type":"ContainerDied","Data":"0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65"}
Dec 05 12:45:11 crc kubenswrapper[4711]: I1205 12:45:11.089179 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b6k9h" event={"ID":"aa29719c-44d1-4d90-9307-c59520df44fa","Type":"ContainerStarted","Data":"320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf"}
Dec 05 12:45:11 crc kubenswrapper[4711]: I1205 12:45:11.108206 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b6k9h" podStartSLOduration=2.67008022 podStartE2EDuration="5.10818434s" podCreationTimestamp="2025-12-05 12:45:06 +0000 UTC" firstStartedPulling="2025-12-05 12:45:08.05234103 +0000 UTC m=+2153.636663370" lastFinishedPulling="2025-12-05 12:45:10.49044514 +0000 UTC m=+2156.074767490" observedRunningTime="2025-12-05 12:45:11.10574318 +0000 UTC m=+2156.690065520" watchObservedRunningTime="2025-12-05 12:45:11.10818434 +0000 UTC m=+2156.692506670"
Dec 05 12:45:13 crc kubenswrapper[4711]: I1205 12:45:13.107444 4711 generic.go:334] "Generic (PLEG): container finished" podID="8bd300af-c4e6-4641-a598-0ab3af20c754" containerID="63af250ebb53acc26243dfca89f3b54d1566f1145617e9c349651742615e777f" exitCode=0
podID="8bd300af-c4e6-4641-a598-0ab3af20c754" containerID="63af250ebb53acc26243dfca89f3b54d1566f1145617e9c349651742615e777f" exitCode=0 Dec 05 12:45:13 crc kubenswrapper[4711]: I1205 12:45:13.107527 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" event={"ID":"8bd300af-c4e6-4641-a598-0ab3af20c754","Type":"ContainerDied","Data":"63af250ebb53acc26243dfca89f3b54d1566f1145617e9c349651742615e777f"} Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.542945 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.611632 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-ssh-key\") pod \"8bd300af-c4e6-4641-a598-0ab3af20c754\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.611878 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktwhw\" (UniqueName: \"kubernetes.io/projected/8bd300af-c4e6-4641-a598-0ab3af20c754-kube-api-access-ktwhw\") pod \"8bd300af-c4e6-4641-a598-0ab3af20c754\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.611932 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-inventory\") pod \"8bd300af-c4e6-4641-a598-0ab3af20c754\" (UID: \"8bd300af-c4e6-4641-a598-0ab3af20c754\") " Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.619670 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd300af-c4e6-4641-a598-0ab3af20c754-kube-api-access-ktwhw" (OuterVolumeSpecName: "kube-api-access-ktwhw") pod "8bd300af-c4e6-4641-a598-0ab3af20c754" (UID: "8bd300af-c4e6-4641-a598-0ab3af20c754"). InnerVolumeSpecName "kube-api-access-ktwhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.648082 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-inventory" (OuterVolumeSpecName: "inventory") pod "8bd300af-c4e6-4641-a598-0ab3af20c754" (UID: "8bd300af-c4e6-4641-a598-0ab3af20c754"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.650918 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8bd300af-c4e6-4641-a598-0ab3af20c754" (UID: "8bd300af-c4e6-4641-a598-0ab3af20c754"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.715121 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.715164 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktwhw\" (UniqueName: \"kubernetes.io/projected/8bd300af-c4e6-4641-a598-0ab3af20c754-kube-api-access-ktwhw\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:14 crc kubenswrapper[4711]: I1205 12:45:14.715175 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bd300af-c4e6-4641-a598-0ab3af20c754-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.133070 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" event={"ID":"8bd300af-c4e6-4641-a598-0ab3af20c754","Type":"ContainerDied","Data":"7758ad6cf41a0f61be4983c50d27b60d2cdfcf14d5b0586f4ee28a2b43c73379"} Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.133125 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.133129 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7758ad6cf41a0f61be4983c50d27b60d2cdfcf14d5b0586f4ee28a2b43c73379" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.240680 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645"] Dec 05 12:45:15 crc kubenswrapper[4711]: E1205 12:45:15.241171 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bd300af-c4e6-4641-a598-0ab3af20c754" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.241201 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bd300af-c4e6-4641-a598-0ab3af20c754" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.241580 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bd300af-c4e6-4641-a598-0ab3af20c754" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.242447 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.245815 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.245893 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.245940 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.245963 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.246318 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.247369 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.249644 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.252818 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.271630 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645"] Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.328894 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.328969 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.329033 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.329256 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.329507 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.329553 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.329577 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.329712 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.329889 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.329967 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.330037 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: 
\"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.330115 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgrjf\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-kube-api-access-fgrjf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.330164 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.330205 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.432747 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.432940 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.433153 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.433231 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.433286 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.433383 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.433550 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.433626 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.433919 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.434892 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgrjf\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-kube-api-access-fgrjf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.435557 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.436301 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-telemetry-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.436516 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.436595 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.440603 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.441594 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.442159 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.442672 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.442760 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.444904 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.445303 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.446625 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.447127 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.447679 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.448819 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.454037 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.460279 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgrjf\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-kube-api-access-fgrjf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: 
\"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.460537 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9m645\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:15 crc kubenswrapper[4711]: I1205 12:45:15.561779 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:16 crc kubenswrapper[4711]: I1205 12:45:16.097162 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645"] Dec 05 12:45:16 crc kubenswrapper[4711]: I1205 12:45:16.143960 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" event={"ID":"8729bdd8-ba47-4ba5-9ff4-71ec183635e7","Type":"ContainerStarted","Data":"be5019d51ffb2803a4f4fce5a799add21e33d8dcd5fcbbdb0c6279201d071a72"} Dec 05 12:45:16 crc kubenswrapper[4711]: I1205 12:45:16.790758 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b6k9h" Dec 05 12:45:16 crc kubenswrapper[4711]: I1205 12:45:16.791535 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b6k9h" Dec 05 12:45:16 crc kubenswrapper[4711]: I1205 12:45:16.839360 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b6k9h" Dec 05 12:45:17 crc kubenswrapper[4711]: I1205 12:45:17.157111 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" event={"ID":"8729bdd8-ba47-4ba5-9ff4-71ec183635e7","Type":"ContainerStarted","Data":"01c5b03ebf7dceee21111dc99b7ed74d21ae457a54c9c6708f7ac7ffaff67b53"} Dec 05 12:45:17 crc kubenswrapper[4711]: I1205 12:45:17.185614 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" podStartSLOduration=1.761028945 podStartE2EDuration="2.185588428s" podCreationTimestamp="2025-12-05 12:45:15 +0000 UTC" firstStartedPulling="2025-12-05 12:45:16.103276078 +0000 UTC m=+2161.687598418" lastFinishedPulling="2025-12-05 12:45:16.527835561 +0000 UTC m=+2162.112157901" observedRunningTime="2025-12-05 12:45:17.178354682 +0000 UTC m=+2162.762677012" watchObservedRunningTime="2025-12-05 12:45:17.185588428 +0000 UTC m=+2162.769910758" Dec 05 12:45:17 crc kubenswrapper[4711]: I1205 12:45:17.210836 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b6k9h" Dec 05 12:45:17 crc kubenswrapper[4711]: I1205 12:45:17.260982 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b6k9h"] Dec 05 12:45:18 crc kubenswrapper[4711]: I1205 12:45:18.300498 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Dec 05 12:45:18 crc kubenswrapper[4711]: I1205 12:45:18.300832 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.181316 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b6k9h" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" containerName="registry-server" containerID="cri-o://320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf" gracePeriod=2 Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.669019 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b6k9h" Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.751692 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc5vh\" (UniqueName: \"kubernetes.io/projected/aa29719c-44d1-4d90-9307-c59520df44fa-kube-api-access-wc5vh\") pod \"aa29719c-44d1-4d90-9307-c59520df44fa\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.751803 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-catalog-content\") pod \"aa29719c-44d1-4d90-9307-c59520df44fa\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.751825 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-utilities\") pod \"aa29719c-44d1-4d90-9307-c59520df44fa\" (UID: \"aa29719c-44d1-4d90-9307-c59520df44fa\") " Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.753295 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-utilities" (OuterVolumeSpecName: "utilities") pod "aa29719c-44d1-4d90-9307-c59520df44fa" (UID: "aa29719c-44d1-4d90-9307-c59520df44fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.759569 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa29719c-44d1-4d90-9307-c59520df44fa-kube-api-access-wc5vh" (OuterVolumeSpecName: "kube-api-access-wc5vh") pod "aa29719c-44d1-4d90-9307-c59520df44fa" (UID: "aa29719c-44d1-4d90-9307-c59520df44fa"). InnerVolumeSpecName "kube-api-access-wc5vh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.774346 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa29719c-44d1-4d90-9307-c59520df44fa" (UID: "aa29719c-44d1-4d90-9307-c59520df44fa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.855511 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc5vh\" (UniqueName: \"kubernetes.io/projected/aa29719c-44d1-4d90-9307-c59520df44fa-kube-api-access-wc5vh\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.855551 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:19 crc kubenswrapper[4711]: I1205 12:45:19.855564 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa29719c-44d1-4d90-9307-c59520df44fa-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.197588 4711 generic.go:334] "Generic (PLEG): container finished" podID="aa29719c-44d1-4d90-9307-c59520df44fa" containerID="320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf" exitCode=0 Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.197643 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b6k9h" event={"ID":"aa29719c-44d1-4d90-9307-c59520df44fa","Type":"ContainerDied","Data":"320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf"} Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.197676 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b6k9h" event={"ID":"aa29719c-44d1-4d90-9307-c59520df44fa","Type":"ContainerDied","Data":"8c82fb8c515e85721048a6e8723f75bda63293a013770b71d5f88c8b0f423b98"} Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.197694 4711 scope.go:117] "RemoveContainer" containerID="320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.197861 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b6k9h" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.227970 4711 scope.go:117] "RemoveContainer" containerID="0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.231964 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b6k9h"] Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.243210 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b6k9h"] Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.252189 4711 scope.go:117] "RemoveContainer" containerID="e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.288979 4711 scope.go:117] "RemoveContainer" containerID="320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf" Dec 05 12:45:20 crc kubenswrapper[4711]: E1205 12:45:20.289363 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf\": container with ID starting with 320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf not found: ID does not exist" containerID="320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.289815 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf"} err="failed to get container status \"320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf\": rpc error: code = NotFound desc = could not find container \"320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf\": container with ID starting with 320fc62db8727388cae2e6e992558cff22ac9388718f581bc71cf846a70c7ccf not found: ID does not exist" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.289856 4711 scope.go:117] "RemoveContainer" containerID="0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65" Dec 05 12:45:20 crc kubenswrapper[4711]: E1205 12:45:20.290676 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65\": container with ID starting with 0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65 not found: ID does not exist" containerID="0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.290706 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65"} err="failed to get container status \"0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65\": rpc error: code = NotFound desc = could not find container \"0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65\": container with ID starting with 0f99a5f7e01a70c3e1f15299c0d3ce5d1425e517b04a9ab6d2fb4cbafebc5f65 not found: ID does not exist" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.290722 4711 scope.go:117] "RemoveContainer" containerID="e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80" Dec 05 12:45:20 crc kubenswrapper[4711]: E1205 12:45:20.290952 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80\": container with ID starting with e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80 not found: ID does not exist" containerID="e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.290985 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80"} err="failed to get container status \"e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80\": rpc error: code = NotFound desc = could not find container \"e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80\": container with ID starting with e667ddbed21957ab926e94b1e9d507c15336d66bb03f8328852a4c8ad65fea80 not found: ID does not exist" Dec 05 12:45:20 crc kubenswrapper[4711]: I1205 12:45:20.694964 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" path="/var/lib/kubelet/pods/aa29719c-44d1-4d90-9307-c59520df44fa/volumes" Dec 05 12:45:30 crc kubenswrapper[4711]: I1205 12:45:30.297170 4711 scope.go:117] "RemoveContainer" containerID="e8a761d8a23b75eb445fcc0ab19366eb184e0a7bb2921912e2034b73842f6c7f" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.789424 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xtv4s"] Dec 05 12:45:47 crc kubenswrapper[4711]: E1205 12:45:47.791718 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" containerName="registry-server" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.791824 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" containerName="registry-server" Dec 05 12:45:47 crc kubenswrapper[4711]: E1205 12:45:47.791933 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" containerName="extract-utilities" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.792009 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" containerName="extract-utilities" Dec 05 12:45:47 crc kubenswrapper[4711]: E1205 12:45:47.792104 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" containerName="extract-content" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.792181 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" containerName="extract-content" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.792519 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa29719c-44d1-4d90-9307-c59520df44fa" containerName="registry-server" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.794605 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.808056 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xtv4s"] Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.948284 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-catalog-content\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.948909 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-utilities\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:47 crc kubenswrapper[4711]: I1205 12:45:47.949356 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqvbd\" (UniqueName: \"kubernetes.io/projected/64a0f860-1826-4f9f-8899-41044f2992f0-kube-api-access-mqvbd\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.051744 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqvbd\" (UniqueName: \"kubernetes.io/projected/64a0f860-1826-4f9f-8899-41044f2992f0-kube-api-access-mqvbd\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.051825 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-catalog-content\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.051852 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-utilities\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.052300 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-catalog-content\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.052319 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-utilities\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.076180 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mqvbd\" (UniqueName: \"kubernetes.io/projected/64a0f860-1826-4f9f-8899-41044f2992f0-kube-api-access-mqvbd\") pod \"redhat-operators-xtv4s\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.115896 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.301065 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.301352 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:45:48 crc kubenswrapper[4711]: I1205 12:45:48.458937 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xtv4s"] Dec 05 12:45:49 crc kubenswrapper[4711]: I1205 12:45:49.479064 4711 generic.go:334] "Generic (PLEG): container finished" podID="64a0f860-1826-4f9f-8899-41044f2992f0" containerID="620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390" exitCode=0 Dec 05 12:45:49 crc kubenswrapper[4711]: I1205 12:45:49.479170 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtv4s" event={"ID":"64a0f860-1826-4f9f-8899-41044f2992f0","Type":"ContainerDied","Data":"620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390"} Dec 05 12:45:49 crc kubenswrapper[4711]: I1205 12:45:49.479361 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtv4s" event={"ID":"64a0f860-1826-4f9f-8899-41044f2992f0","Type":"ContainerStarted","Data":"3b3fec099d6b497cd62c8078319c4d88c103d104dfb642aab32edff71ca60339"} Dec 05 12:45:50 crc kubenswrapper[4711]: I1205 12:45:50.502747 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtv4s" event={"ID":"64a0f860-1826-4f9f-8899-41044f2992f0","Type":"ContainerStarted","Data":"e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762"} Dec 05 12:45:54 crc kubenswrapper[4711]: I1205 12:45:54.539980 4711 generic.go:334] "Generic (PLEG): container finished" podID="64a0f860-1826-4f9f-8899-41044f2992f0" containerID="e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762" exitCode=0 Dec 05 12:45:54 crc kubenswrapper[4711]: I1205 12:45:54.540099 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtv4s" event={"ID":"64a0f860-1826-4f9f-8899-41044f2992f0","Type":"ContainerDied","Data":"e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762"} Dec 05 12:45:55 crc kubenswrapper[4711]: I1205 12:45:55.551701 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtv4s" event={"ID":"64a0f860-1826-4f9f-8899-41044f2992f0","Type":"ContainerStarted","Data":"460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e"} Dec 05 12:45:55 crc kubenswrapper[4711]: I1205 12:45:55.571459 4711 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xtv4s" podStartSLOduration=3.068362297 podStartE2EDuration="8.571443959s" podCreationTimestamp="2025-12-05 12:45:47 +0000 UTC" firstStartedPulling="2025-12-05 12:45:49.481057522 +0000 UTC m=+2195.065379852" lastFinishedPulling="2025-12-05 12:45:54.984139184 +0000 UTC m=+2200.568461514" observedRunningTime="2025-12-05 12:45:55.568009055 +0000 UTC m=+2201.152331385" watchObservedRunningTime="2025-12-05 12:45:55.571443959 +0000 UTC m=+2201.155766289" Dec 05 12:45:56 crc kubenswrapper[4711]: I1205 12:45:56.561571 4711 generic.go:334] "Generic (PLEG): container finished" podID="8729bdd8-ba47-4ba5-9ff4-71ec183635e7" containerID="01c5b03ebf7dceee21111dc99b7ed74d21ae457a54c9c6708f7ac7ffaff67b53" exitCode=0 Dec 05 12:45:56 crc kubenswrapper[4711]: I1205 12:45:56.561619 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" event={"ID":"8729bdd8-ba47-4ba5-9ff4-71ec183635e7","Type":"ContainerDied","Data":"01c5b03ebf7dceee21111dc99b7ed74d21ae457a54c9c6708f7ac7ffaff67b53"} Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.116602 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.116958 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.412487 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.554748 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-libvirt-combined-ca-bundle\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555140 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-inventory\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555228 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-repo-setup-combined-ca-bundle\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555308 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-telemetry-combined-ca-bundle\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555435 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-neutron-metadata-combined-ca-bundle\") pod 
\"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555563 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555661 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-bootstrap-combined-ca-bundle\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555765 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-ovn-default-certs-0\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555883 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ovn-combined-ca-bundle\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.555978 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.556062 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.556144 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgrjf\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-kube-api-access-fgrjf\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.556217 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-nova-combined-ca-bundle\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.556311 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ssh-key\") pod \"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\" (UID: 
\"8729bdd8-ba47-4ba5-9ff4-71ec183635e7\") " Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.563033 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.563145 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.564916 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.565159 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.565600 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.566026 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.566073 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.566924 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.566999 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.567586 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.569584 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.574720 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-kube-api-access-fgrjf" (OuterVolumeSpecName: "kube-api-access-fgrjf") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "kube-api-access-fgrjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.594224 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.628519 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-inventory" (OuterVolumeSpecName: "inventory") pod "8729bdd8-ba47-4ba5-9ff4-71ec183635e7" (UID: "8729bdd8-ba47-4ba5-9ff4-71ec183635e7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.658945 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.658990 4711 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659008 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659023 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659039 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgrjf\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-kube-api-access-fgrjf\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659050 4711 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659064 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659075 4711 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659088 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659099 4711 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659110 4711 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659124 4711 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-neutron-metadata-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659136 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.659147 4711 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8729bdd8-ba47-4ba5-9ff4-71ec183635e7-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.711993 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg"] Dec 05 12:45:58 crc kubenswrapper[4711]: E1205 12:45:58.712428 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8729bdd8-ba47-4ba5-9ff4-71ec183635e7" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.712445 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8729bdd8-ba47-4ba5-9ff4-71ec183635e7" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.712630 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8729bdd8-ba47-4ba5-9ff4-71ec183635e7" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.713480 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.716162 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.729186 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg"] Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.862458 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.862544 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.862588 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzxmb\" (UniqueName: \"kubernetes.io/projected/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-kube-api-access-bzxmb\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.862634 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.862991 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.945893 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" event={"ID":"8729bdd8-ba47-4ba5-9ff4-71ec183635e7","Type":"ContainerDied","Data":"be5019d51ffb2803a4f4fce5a799add21e33d8dcd5fcbbdb0c6279201d071a72"} Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.945944 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be5019d51ffb2803a4f4fce5a799add21e33d8dcd5fcbbdb0c6279201d071a72" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.945959 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9m645" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.964719 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.965129 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.965192 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.965226 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzxmb\" (UniqueName: \"kubernetes.io/projected/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-kube-api-access-bzxmb\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.965292 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-inventory\") pod 
\"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.965621 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.970942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.972014 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.974031 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:58 crc kubenswrapper[4711]: I1205 12:45:58.985232 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzxmb\" (UniqueName: \"kubernetes.io/projected/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-kube-api-access-bzxmb\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mchpg\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:59 crc kubenswrapper[4711]: I1205 12:45:59.063149 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" Dec 05 12:45:59 crc kubenswrapper[4711]: I1205 12:45:59.187399 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xtv4s" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="registry-server" probeResult="failure" output=< Dec 05 12:45:59 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 12:45:59 crc kubenswrapper[4711]: > Dec 05 12:45:59 crc kubenswrapper[4711]: I1205 12:45:59.664691 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg"] Dec 05 12:45:59 crc kubenswrapper[4711]: W1205 12:45:59.666288 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e0f0d4c_40ae_4f75_ba7a_e68edad4c5fc.slice/crio-b9cdf8f8632dcdf62750a2d0a7742b69049202d01fd36f6352ee60bf64386cd6 WatchSource:0}: Error finding container b9cdf8f8632dcdf62750a2d0a7742b69049202d01fd36f6352ee60bf64386cd6: Status 404 returned error can't find the container with id b9cdf8f8632dcdf62750a2d0a7742b69049202d01fd36f6352ee60bf64386cd6 Dec 05 12:45:59 crc kubenswrapper[4711]: I1205 12:45:59.958308 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" event={"ID":"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc","Type":"ContainerStarted","Data":"b9cdf8f8632dcdf62750a2d0a7742b69049202d01fd36f6352ee60bf64386cd6"} Dec 05 12:46:00 crc kubenswrapper[4711]: I1205 12:46:00.971337 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" event={"ID":"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc","Type":"ContainerStarted","Data":"64dd2930f9bd50c0b2b7d755809db99c67a4b11ad6ab39fd8c3d12986a3f3776"} Dec 05 12:46:00 crc kubenswrapper[4711]: I1205 12:46:00.992837 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" podStartSLOduration=2.415565854 podStartE2EDuration="2.992819583s" podCreationTimestamp="2025-12-05 12:45:58 +0000 UTC" firstStartedPulling="2025-12-05 12:45:59.66880501 +0000 UTC m=+2205.253127340" lastFinishedPulling="2025-12-05 12:46:00.246058739 +0000 UTC m=+2205.830381069" observedRunningTime="2025-12-05 12:46:00.984856678 +0000 UTC m=+2206.569179028" watchObservedRunningTime="2025-12-05 12:46:00.992819583 +0000 UTC m=+2206.577141903" Dec 05 12:46:08 crc kubenswrapper[4711]: I1205 12:46:08.165915 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:46:08 crc kubenswrapper[4711]: I1205 12:46:08.214760 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:46:08 crc kubenswrapper[4711]: I1205 12:46:08.403929 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xtv4s"] Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.046547 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xtv4s" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="registry-server" containerID="cri-o://460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e" gracePeriod=2 Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.508306 4711 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.593659 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-utilities\") pod \"64a0f860-1826-4f9f-8899-41044f2992f0\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.594021 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqvbd\" (UniqueName: \"kubernetes.io/projected/64a0f860-1826-4f9f-8899-41044f2992f0-kube-api-access-mqvbd\") pod \"64a0f860-1826-4f9f-8899-41044f2992f0\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.594180 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-catalog-content\") pod \"64a0f860-1826-4f9f-8899-41044f2992f0\" (UID: \"64a0f860-1826-4f9f-8899-41044f2992f0\") " Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.594766 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-utilities" (OuterVolumeSpecName: "utilities") pod "64a0f860-1826-4f9f-8899-41044f2992f0" (UID: "64a0f860-1826-4f9f-8899-41044f2992f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.594910 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.602954 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64a0f860-1826-4f9f-8899-41044f2992f0-kube-api-access-mqvbd" (OuterVolumeSpecName: "kube-api-access-mqvbd") pod "64a0f860-1826-4f9f-8899-41044f2992f0" (UID: "64a0f860-1826-4f9f-8899-41044f2992f0"). InnerVolumeSpecName "kube-api-access-mqvbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.699468 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqvbd\" (UniqueName: \"kubernetes.io/projected/64a0f860-1826-4f9f-8899-41044f2992f0-kube-api-access-mqvbd\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.720064 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "64a0f860-1826-4f9f-8899-41044f2992f0" (UID: "64a0f860-1826-4f9f-8899-41044f2992f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:10 crc kubenswrapper[4711]: I1205 12:46:10.801970 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a0f860-1826-4f9f-8899-41044f2992f0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.063026 4711 generic.go:334] "Generic (PLEG): container finished" podID="64a0f860-1826-4f9f-8899-41044f2992f0" containerID="460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e" exitCode=0 Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.063083 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtv4s" event={"ID":"64a0f860-1826-4f9f-8899-41044f2992f0","Type":"ContainerDied","Data":"460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e"} Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.063120 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtv4s" event={"ID":"64a0f860-1826-4f9f-8899-41044f2992f0","Type":"ContainerDied","Data":"3b3fec099d6b497cd62c8078319c4d88c103d104dfb642aab32edff71ca60339"} Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.063145 4711 scope.go:117] "RemoveContainer" containerID="460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.063334 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtv4s" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.093525 4711 scope.go:117] "RemoveContainer" containerID="e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.096424 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xtv4s"] Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.106021 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xtv4s"] Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.121828 4711 scope.go:117] "RemoveContainer" containerID="620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.167489 4711 scope.go:117] "RemoveContainer" containerID="460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e" Dec 05 12:46:11 crc kubenswrapper[4711]: E1205 12:46:11.168021 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e\": container with ID starting with 460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e not found: ID does not exist" containerID="460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.168114 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e"} err="failed to get container status \"460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e\": rpc error: code = NotFound desc = could not find container \"460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e\": container with ID starting with 460e20a5287f6c6722a9e702a024115b69038105152f74632c1863486eef6f0e not found: ID does not exist" Dec 05 12:46:11 crc 
kubenswrapper[4711]: I1205 12:46:11.168262 4711 scope.go:117] "RemoveContainer" containerID="e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762" Dec 05 12:46:11 crc kubenswrapper[4711]: E1205 12:46:11.168953 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762\": container with ID starting with e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762 not found: ID does not exist" containerID="e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.169019 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762"} err="failed to get container status \"e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762\": rpc error: code = NotFound desc = could not find container \"e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762\": container with ID starting with e153ca71abf69bae2771a48435952aafc47dab121e3b52a5e01cb47397e2a762 not found: ID does not exist" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.169049 4711 scope.go:117] "RemoveContainer" containerID="620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390" Dec 05 12:46:11 crc kubenswrapper[4711]: E1205 12:46:11.169737 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390\": container with ID starting with 620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390 not found: ID does not exist" containerID="620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390" Dec 05 12:46:11 crc kubenswrapper[4711]: I1205 12:46:11.169764 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390"} err="failed to get container status \"620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390\": rpc error: code = NotFound desc = could not find container \"620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390\": container with ID starting with 620538f1638e2e638a9cc44b19724c0ba3c2da86a0162ed3bbab0fa1dbdf7390 not found: ID does not exist" Dec 05 12:46:12 crc kubenswrapper[4711]: I1205 12:46:12.697260 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" path="/var/lib/kubelet/pods/64a0f860-1826-4f9f-8899-41044f2992f0/volumes" Dec 05 12:46:18 crc kubenswrapper[4711]: I1205 12:46:18.300945 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:46:18 crc kubenswrapper[4711]: I1205 12:46:18.302186 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:46:18 crc kubenswrapper[4711]: I1205 12:46:18.302321 4711 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:46:18 crc kubenswrapper[4711]: I1205 12:46:18.302923 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:46:18 crc kubenswrapper[4711]: I1205 12:46:18.303047 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" gracePeriod=600 Dec 05 12:46:18 crc kubenswrapper[4711]: E1205 12:46:18.424824 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:46:19 crc kubenswrapper[4711]: I1205 12:46:19.141778 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" exitCode=0 Dec 05 12:46:19 crc kubenswrapper[4711]: I1205 12:46:19.142121 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"} Dec 05 12:46:19 crc kubenswrapper[4711]: I1205 12:46:19.142156 4711 scope.go:117] "RemoveContainer" containerID="0154cbb3ce8d498f21d6fffddd10e58f73c3ea89753011e091ff41fc17837fe9" Dec 05 12:46:19 crc kubenswrapper[4711]: I1205 12:46:19.143127 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:46:19 crc kubenswrapper[4711]: E1205 12:46:19.143437 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:46:31 crc kubenswrapper[4711]: I1205 12:46:31.683128 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:46:31 crc kubenswrapper[4711]: E1205 12:46:31.683872 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:46:43 
crc kubenswrapper[4711]: I1205 12:46:43.683779 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:46:43 crc kubenswrapper[4711]: E1205 12:46:43.685029 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:46:58 crc kubenswrapper[4711]: I1205 12:46:58.694027 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:46:58 crc kubenswrapper[4711]: E1205 12:46:58.695143 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:47:05 crc kubenswrapper[4711]: I1205 12:47:05.551825 4711 generic.go:334] "Generic (PLEG): container finished" podID="4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" containerID="64dd2930f9bd50c0b2b7d755809db99c67a4b11ad6ab39fd8c3d12986a3f3776" exitCode=0
Dec 05 12:47:05 crc kubenswrapper[4711]: I1205 12:47:05.551892 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" event={"ID":"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc","Type":"ContainerDied","Data":"64dd2930f9bd50c0b2b7d755809db99c67a4b11ad6ab39fd8c3d12986a3f3776"}
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.000597 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.064964 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzxmb\" (UniqueName: \"kubernetes.io/projected/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-kube-api-access-bzxmb\") pod \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") "
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.065029 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovn-combined-ca-bundle\") pod \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") "
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.065081 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ssh-key\") pod \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") "
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.065130 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovncontroller-config-0\") pod \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") "
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.065196 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-inventory\") pod \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\" (UID: \"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc\") "
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.072525 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" (UID: "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.072759 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-kube-api-access-bzxmb" (OuterVolumeSpecName: "kube-api-access-bzxmb") pod "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" (UID: "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc"). InnerVolumeSpecName "kube-api-access-bzxmb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.095831 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" (UID: "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.101904 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-inventory" (OuterVolumeSpecName: "inventory") pod "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" (UID: "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.102061 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" (UID: "4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.168219 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzxmb\" (UniqueName: \"kubernetes.io/projected/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-kube-api-access-bzxmb\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.168260 4711 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.168272 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.168281 4711 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.168291 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.573969 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg" event={"ID":"4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc","Type":"ContainerDied","Data":"b9cdf8f8632dcdf62750a2d0a7742b69049202d01fd36f6352ee60bf64386cd6"}
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.574011 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9cdf8f8632dcdf62750a2d0a7742b69049202d01fd36f6352ee60bf64386cd6"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.574080 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mchpg"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.763455 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"]
Dec 05 12:47:07 crc kubenswrapper[4711]: E1205 12:47:07.766032 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="extract-content"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.766063 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="extract-content"
Dec 05 12:47:07 crc kubenswrapper[4711]: E1205 12:47:07.766098 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="registry-server"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.766109 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="registry-server"
Dec 05 12:47:07 crc kubenswrapper[4711]: E1205 12:47:07.766123 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.766130 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 05 12:47:07 crc kubenswrapper[4711]: E1205 12:47:07.766150 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="extract-utilities"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.766157 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="extract-utilities"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.766437 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="64a0f860-1826-4f9f-8899-41044f2992f0" containerName="registry-server"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.766457 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.767629 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.770416 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.770597 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.770705 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.770825 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.770955 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.771133 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.785980 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"]
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.890509 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.890862 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.890896 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.890940 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtrr5\" (UniqueName: \"kubernetes.io/projected/8942d3ba-8261-4115-a071-6621ab696423-kube-api-access-wtrr5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.891050 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.891102 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.993437 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.993578 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.993626 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.993703 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtrr5\" (UniqueName: \"kubernetes.io/projected/8942d3ba-8261-4115-a071-6621ab696423-kube-api-access-wtrr5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.993883 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.993981 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.997734 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.998287 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.998459 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:07 crc kubenswrapper[4711]: I1205 12:47:07.999179 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:08 crc kubenswrapper[4711]: I1205 12:47:08.000260 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:08 crc kubenswrapper[4711]: I1205 12:47:08.012528 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtrr5\" (UniqueName: \"kubernetes.io/projected/8942d3ba-8261-4115-a071-6621ab696423-kube-api-access-wtrr5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:08 crc kubenswrapper[4711]: I1205 12:47:08.092076 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:47:08 crc kubenswrapper[4711]: I1205 12:47:08.626149 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"]
Dec 05 12:47:09 crc kubenswrapper[4711]: I1205 12:47:09.590929 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn" event={"ID":"8942d3ba-8261-4115-a071-6621ab696423","Type":"ContainerStarted","Data":"eff810457bf9cbb7c142ecbc7f1164142518b9dcd6a03f783a295beda19a5cd1"}
Dec 05 12:47:09 crc kubenswrapper[4711]: I1205 12:47:09.591279 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn" event={"ID":"8942d3ba-8261-4115-a071-6621ab696423","Type":"ContainerStarted","Data":"13552a72fcc850653272fb740b1c535f314bcf6a6f4321bb649603a32b2ffe3c"}
Dec 05 12:47:09 crc kubenswrapper[4711]: I1205 12:47:09.609269 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn" podStartSLOduration=2.165773688 podStartE2EDuration="2.609249175s" podCreationTimestamp="2025-12-05 12:47:07 +0000 UTC" firstStartedPulling="2025-12-05 12:47:08.631088891 +0000 UTC m=+2274.215411221" lastFinishedPulling="2025-12-05 12:47:09.074564388 +0000 UTC m=+2274.658886708" observedRunningTime="2025-12-05 12:47:09.607092012 +0000 UTC m=+2275.191414362" watchObservedRunningTime="2025-12-05 12:47:09.609249175 +0000 UTC m=+2275.193571505"
Dec 05 12:47:09 crc kubenswrapper[4711]: I1205 12:47:09.683845 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:47:09 crc kubenswrapper[4711]: E1205 12:47:09.684099 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:47:23 crc kubenswrapper[4711]: I1205 12:47:23.683569 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:47:23 crc kubenswrapper[4711]: E1205 12:47:23.685728 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:47:36 crc kubenswrapper[4711]: I1205 12:47:36.683793 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:47:36 crc kubenswrapper[4711]: E1205 12:47:36.684723 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:47:49 crc kubenswrapper[4711]: I1205 12:47:49.683936 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:47:49 crc kubenswrapper[4711]: E1205 12:47:49.684845 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:47:59 crc kubenswrapper[4711]: I1205 12:47:59.035725 4711 generic.go:334] "Generic (PLEG): container finished" podID="8942d3ba-8261-4115-a071-6621ab696423" containerID="eff810457bf9cbb7c142ecbc7f1164142518b9dcd6a03f783a295beda19a5cd1" exitCode=0
Dec 05 12:47:59 crc kubenswrapper[4711]: I1205 12:47:59.035822 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn" event={"ID":"8942d3ba-8261-4115-a071-6621ab696423","Type":"ContainerDied","Data":"eff810457bf9cbb7c142ecbc7f1164142518b9dcd6a03f783a295beda19a5cd1"}
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.475774 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.578683 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-metadata-combined-ca-bundle\") pod \"8942d3ba-8261-4115-a071-6621ab696423\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") "
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.578814 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtrr5\" (UniqueName: \"kubernetes.io/projected/8942d3ba-8261-4115-a071-6621ab696423-kube-api-access-wtrr5\") pod \"8942d3ba-8261-4115-a071-6621ab696423\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") "
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.578934 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-inventory\") pod \"8942d3ba-8261-4115-a071-6621ab696423\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") "
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.579014 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-ovn-metadata-agent-neutron-config-0\") pod \"8942d3ba-8261-4115-a071-6621ab696423\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") "
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.579070 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-ssh-key\") pod \"8942d3ba-8261-4115-a071-6621ab696423\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") "
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.579176 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-nova-metadata-neutron-config-0\") pod \"8942d3ba-8261-4115-a071-6621ab696423\" (UID: \"8942d3ba-8261-4115-a071-6621ab696423\") "
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.587578 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "8942d3ba-8261-4115-a071-6621ab696423" (UID: "8942d3ba-8261-4115-a071-6621ab696423"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.598561 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8942d3ba-8261-4115-a071-6621ab696423-kube-api-access-wtrr5" (OuterVolumeSpecName: "kube-api-access-wtrr5") pod "8942d3ba-8261-4115-a071-6621ab696423" (UID: "8942d3ba-8261-4115-a071-6621ab696423"). InnerVolumeSpecName "kube-api-access-wtrr5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.613931 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "8942d3ba-8261-4115-a071-6621ab696423" (UID: "8942d3ba-8261-4115-a071-6621ab696423"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.614150 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "8942d3ba-8261-4115-a071-6621ab696423" (UID: "8942d3ba-8261-4115-a071-6621ab696423"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.615567 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8942d3ba-8261-4115-a071-6621ab696423" (UID: "8942d3ba-8261-4115-a071-6621ab696423"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.628605 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-inventory" (OuterVolumeSpecName: "inventory") pod "8942d3ba-8261-4115-a071-6621ab696423" (UID: "8942d3ba-8261-4115-a071-6621ab696423"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.681783 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtrr5\" (UniqueName: \"kubernetes.io/projected/8942d3ba-8261-4115-a071-6621ab696423-kube-api-access-wtrr5\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.681821 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.681835 4711 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.681849 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.681861 4711 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:00 crc kubenswrapper[4711]: I1205 12:48:00.681873 4711 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8942d3ba-8261-4115-a071-6621ab696423-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.060621 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn" event={"ID":"8942d3ba-8261-4115-a071-6621ab696423","Type":"ContainerDied","Data":"13552a72fcc850653272fb740b1c535f314bcf6a6f4321bb649603a32b2ffe3c"}
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.060662 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13552a72fcc850653272fb740b1c535f314bcf6a6f4321bb649603a32b2ffe3c"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.060685 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.163340 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"]
Dec 05 12:48:01 crc kubenswrapper[4711]: E1205 12:48:01.164436 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8942d3ba-8261-4115-a071-6621ab696423" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.164461 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8942d3ba-8261-4115-a071-6621ab696423" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.164668 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8942d3ba-8261-4115-a071-6621ab696423" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.165609 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.168113 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.168351 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.168715 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.168921 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.169138 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.184088 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"]
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.214490 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.214583 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b55f\" (UniqueName: \"kubernetes.io/projected/2edaac33-536e-4fe8-9579-236b42229841-kube-api-access-7b55f\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.214993 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.215076 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.215232 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.316149 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b55f\" (UniqueName: \"kubernetes.io/projected/2edaac33-536e-4fe8-9579-236b42229841-kube-api-access-7b55f\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.316294 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.316330 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.316416 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.316447 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.324127 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.324151 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.324127 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.325517 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.334729 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b55f\" (UniqueName: \"kubernetes.io/projected/2edaac33-536e-4fe8-9579-236b42229841-kube-api-access-7b55f\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.520173 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"
Dec 05 12:48:01 crc kubenswrapper[4711]: I1205 12:48:01.684607 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:48:01 crc kubenswrapper[4711]: E1205 12:48:01.685132 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:48:02 crc kubenswrapper[4711]: I1205 12:48:02.053625 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2"]
Dec 05 12:48:02 crc kubenswrapper[4711]: I1205 12:48:02.071817 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2" event={"ID":"2edaac33-536e-4fe8-9579-236b42229841","Type":"ContainerStarted","Data":"8f88f198d1c26847e7ea92b0a6fb5c05944e97c12cd76d3cca74946045db148e"}
Dec 05 12:48:03 crc kubenswrapper[4711]: I1205 12:48:03.082750 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2" event={"ID":"2edaac33-536e-4fe8-9579-236b42229841","Type":"ContainerStarted","Data":"830ab1c5434b29c0357e7ca7c6a2adebc0a7b732efcfff8ebac8032cd6237ec9"}
Dec 05 12:48:03 crc kubenswrapper[4711]: I1205 12:48:03.102096 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2" podStartSLOduration=1.609894988 podStartE2EDuration="2.102073845s" podCreationTimestamp="2025-12-05 12:48:01 +0000 UTC" firstStartedPulling="2025-12-05 12:48:02.055625052 +0000 UTC m=+2327.639947382" lastFinishedPulling="2025-12-05 12:48:02.547803909 +0000 UTC m=+2328.132126239" observedRunningTime="2025-12-05 12:48:03.09857068 +0000 UTC m=+2328.682893030" watchObservedRunningTime="2025-12-05 12:48:03.102073845 +0000 UTC m=+2328.686396175"
Dec 05 12:48:13 crc kubenswrapper[4711]: I1205 12:48:13.683776 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:48:13 crc kubenswrapper[4711]: E1205 12:48:13.685600 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:48:28 crc kubenswrapper[4711]: I1205 12:48:28.691509 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:48:28 crc kubenswrapper[4711]: E1205 12:48:28.692312 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:48:39 crc kubenswrapper[4711]: I1205 12:48:39.683239 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:48:39 crc kubenswrapper[4711]: E1205 12:48:39.685056 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:48:42 crc kubenswrapper[4711]: I1205 12:48:42.849206 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m8pkc"]
Dec 05 12:48:42 crc kubenswrapper[4711]: I1205 12:48:42.852141 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:42 crc kubenswrapper[4711]: I1205 12:48:42.865178 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m8pkc"]
Dec 05 12:48:42 crc kubenswrapper[4711]: I1205 12:48:42.967171 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfg8q\" (UniqueName: \"kubernetes.io/projected/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-kube-api-access-lfg8q\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:42 crc kubenswrapper[4711]: I1205 12:48:42.967542 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-utilities\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:42 crc kubenswrapper[4711]: I1205 12:48:42.967778 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-catalog-content\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:43 crc kubenswrapper[4711]: I1205 12:48:43.069669 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfg8q\" (UniqueName: \"kubernetes.io/projected/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-kube-api-access-lfg8q\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:43 crc kubenswrapper[4711]: I1205 12:48:43.070060 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-utilities\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:43 crc kubenswrapper[4711]: I1205 12:48:43.070342 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-catalog-content\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:43 crc kubenswrapper[4711]: I1205 12:48:43.070906 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-utilities\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:43 crc kubenswrapper[4711]: I1205 12:48:43.070981 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-catalog-content\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:43 crc kubenswrapper[4711]: I1205 12:48:43.094295 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfg8q\" (UniqueName: \"kubernetes.io/projected/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-kube-api-access-lfg8q\") pod \"certified-operators-m8pkc\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") " pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:43 crc kubenswrapper[4711]: I1205 12:48:43.172182 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:43 crc kubenswrapper[4711]: I1205 12:48:43.652412 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m8pkc"]
Dec 05 12:48:44 crc kubenswrapper[4711]: I1205 12:48:44.504577 4711 generic.go:334] "Generic (PLEG): container finished" podID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerID="3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201" exitCode=0
Dec 05 12:48:44 crc kubenswrapper[4711]: I1205 12:48:44.504862 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8pkc" event={"ID":"61a73f98-9b7e-4d3f-bb2b-af15667cfe04","Type":"ContainerDied","Data":"3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201"}
Dec 05 12:48:44 crc kubenswrapper[4711]: I1205 12:48:44.504892 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8pkc" event={"ID":"61a73f98-9b7e-4d3f-bb2b-af15667cfe04","Type":"ContainerStarted","Data":"1306c539717e8f5a7884e327696d69af84e96d4e5d29a30a957873a6cdb3d0ba"}
Dec 05 12:48:44 crc kubenswrapper[4711]: I1205 12:48:44.507661 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 12:48:46 crc kubenswrapper[4711]: I1205 12:48:46.522461 4711 generic.go:334] "Generic (PLEG): container finished" podID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerID="6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8" exitCode=0
Dec 05 12:48:46 crc kubenswrapper[4711]: I1205 12:48:46.522534 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8pkc" event={"ID":"61a73f98-9b7e-4d3f-bb2b-af15667cfe04","Type":"ContainerDied","Data":"6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8"}
Dec 05 12:48:47 crc kubenswrapper[4711]: I1205 12:48:47.533564 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8pkc" event={"ID":"61a73f98-9b7e-4d3f-bb2b-af15667cfe04","Type":"ContainerStarted","Data":"bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913"}
Dec 05 12:48:47 crc kubenswrapper[4711]: I1205 12:48:47.572953 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m8pkc" podStartSLOduration=3.148100794 podStartE2EDuration="5.57292814s" podCreationTimestamp="2025-12-05 12:48:42 +0000 UTC" firstStartedPulling="2025-12-05 12:48:44.507288281 +0000 UTC m=+2370.091610611" lastFinishedPulling="2025-12-05 12:48:46.932115627 +0000 UTC m=+2372.516437957" observedRunningTime="2025-12-05 12:48:47.566909632 +0000 UTC m=+2373.151231992" watchObservedRunningTime="2025-12-05 12:48:47.57292814 +0000 UTC m=+2373.157250470"
Dec 05 12:48:52 crc kubenswrapper[4711]: I1205 12:48:52.683957 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:48:52 crc kubenswrapper[4711]: E1205 12:48:52.684823 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:48:53 crc kubenswrapper[4711]: I1205 12:48:53.172363 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:53 crc kubenswrapper[4711]: I1205 12:48:53.172487 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:53 crc kubenswrapper[4711]: I1205 12:48:53.223831 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:53 crc kubenswrapper[4711]: I1205 12:48:53.627726 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:53 crc kubenswrapper[4711]: I1205 12:48:53.679183 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m8pkc"]
Dec 05 12:48:55 crc kubenswrapper[4711]: I1205 12:48:55.601744 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m8pkc" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerName="registry-server" containerID="cri-o://bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913" gracePeriod=2
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.102813 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.189535 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-catalog-content\") pod \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") "
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.189666 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfg8q\" (UniqueName: \"kubernetes.io/projected/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-kube-api-access-lfg8q\") pod \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") "
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.189729 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-utilities\") pod \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\" (UID: \"61a73f98-9b7e-4d3f-bb2b-af15667cfe04\") "
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.190635 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-utilities" (OuterVolumeSpecName: "utilities") pod "61a73f98-9b7e-4d3f-bb2b-af15667cfe04" (UID: "61a73f98-9b7e-4d3f-bb2b-af15667cfe04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.196576 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-kube-api-access-lfg8q" (OuterVolumeSpecName: "kube-api-access-lfg8q") pod "61a73f98-9b7e-4d3f-bb2b-af15667cfe04" (UID: "61a73f98-9b7e-4d3f-bb2b-af15667cfe04"). InnerVolumeSpecName "kube-api-access-lfg8q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.292051 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfg8q\" (UniqueName: \"kubernetes.io/projected/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-kube-api-access-lfg8q\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.292089 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.612054 4711 generic.go:334] "Generic (PLEG): container finished" podID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerID="bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913" exitCode=0
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.612112 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8pkc" event={"ID":"61a73f98-9b7e-4d3f-bb2b-af15667cfe04","Type":"ContainerDied","Data":"bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913"}
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.612143 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8pkc" event={"ID":"61a73f98-9b7e-4d3f-bb2b-af15667cfe04","Type":"ContainerDied","Data":"1306c539717e8f5a7884e327696d69af84e96d4e5d29a30a957873a6cdb3d0ba"}
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.612164 4711 scope.go:117] "RemoveContainer" containerID="bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.612195 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m8pkc"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.636512 4711 scope.go:117] "RemoveContainer" containerID="6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.662560 4711 scope.go:117] "RemoveContainer" containerID="3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.705856 4711 scope.go:117] "RemoveContainer" containerID="bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913"
Dec 05 12:48:56 crc kubenswrapper[4711]: E1205 12:48:56.706186 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913\": container with ID starting with bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913 not found: ID does not exist" containerID="bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.706223 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913"} err="failed to get container status \"bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913\": rpc error: code = NotFound desc = could not find container \"bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913\": container with ID starting with bbfdd62bb27dc99beb614bb80a6f3a0e92aada264132774a7fa49999ae842913 not found: ID does not exist"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.706246 4711 scope.go:117] "RemoveContainer" containerID="6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8"
Dec 05 12:48:56 crc kubenswrapper[4711]: E1205 12:48:56.706922 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8\": container with ID starting with 6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8 not found: ID does not exist" containerID="6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.707053 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8"} err="failed to get container status \"6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8\": rpc error: code = NotFound desc = could not find container \"6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8\": container with ID starting with 6519df6ed3387533dc6590fc073a0677551b08b79f830cdc3ba2b8c078d8f0e8 not found: ID does not exist"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.707163 4711 scope.go:117] "RemoveContainer" containerID="3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201"
Dec 05 12:48:56 crc kubenswrapper[4711]: E1205 12:48:56.707825 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201\": container with ID starting with 3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201 not found: ID does not exist" containerID="3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.707885 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201"} err="failed to get container status \"3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201\": rpc error: code = NotFound desc = could not find container \"3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201\": container with ID starting with 3734ddeabe355996bb0e8dbb978e5b1e8cd9ca361e395905f8991e947022c201 not found: ID does not exist"
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.744816 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61a73f98-9b7e-4d3f-bb2b-af15667cfe04" (UID: "61a73f98-9b7e-4d3f-bb2b-af15667cfe04"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.802099 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61a73f98-9b7e-4d3f-bb2b-af15667cfe04-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.951356 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m8pkc"]
Dec 05 12:48:56 crc kubenswrapper[4711]: I1205 12:48:56.962608 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m8pkc"]
Dec 05 12:48:58 crc kubenswrapper[4711]: I1205 12:48:58.697923 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" path="/var/lib/kubelet/pods/61a73f98-9b7e-4d3f-bb2b-af15667cfe04/volumes"
Dec 05 12:49:07 crc kubenswrapper[4711]: I1205 12:49:07.683091 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:49:07 crc kubenswrapper[4711]: E1205 12:49:07.684156 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:49:21 crc kubenswrapper[4711]: I1205 12:49:21.684155 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:49:21 crc kubenswrapper[4711]: E1205 12:49:21.685140 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:49:36 crc kubenswrapper[4711]: I1205 12:49:36.683289 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69"
Dec 05 12:49:36 crc kubenswrapper[4711]: E1205 12:49:36.684434 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.396410 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-26hds"]
Dec 05 12:49:41 crc kubenswrapper[4711]: E1205 12:49:41.397526 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerName="registry-server"
Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.397544 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerName="registry-server"
Dec 05 12:49:41 crc kubenswrapper[4711]: E1205 12:49:41.397555 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerName="extract-utilities"
Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.397563 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerName="extract-utilities"
Dec 05 12:49:41 crc kubenswrapper[4711]: E1205 12:49:41.397591 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerName="extract-content"
Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.397599 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerName="extract-content"
Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.397876 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="61a73f98-9b7e-4d3f-bb2b-af15667cfe04" containerName="registry-server"
Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.399979 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.407965 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-26hds"] Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.503896 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c56cf\" (UniqueName: \"kubernetes.io/projected/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-kube-api-access-c56cf\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.504222 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-utilities\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.507611 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-catalog-content\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.609655 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-catalog-content\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.609718 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c56cf\" (UniqueName: \"kubernetes.io/projected/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-kube-api-access-c56cf\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.609760 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-utilities\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.610324 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-catalog-content\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.610327 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-utilities\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.635356 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-c56cf\" (UniqueName: \"kubernetes.io/projected/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-kube-api-access-c56cf\") pod \"community-operators-26hds\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:41 crc kubenswrapper[4711]: I1205 12:49:41.727087 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:42 crc kubenswrapper[4711]: I1205 12:49:42.290148 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-26hds"] Dec 05 12:49:43 crc kubenswrapper[4711]: I1205 12:49:43.118470 4711 generic.go:334] "Generic (PLEG): container finished" podID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerID="ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b" exitCode=0 Dec 05 12:49:43 crc kubenswrapper[4711]: I1205 12:49:43.118518 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-26hds" event={"ID":"b3cc8b59-9e8d-4150-8234-3b56aeaa1642","Type":"ContainerDied","Data":"ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b"} Dec 05 12:49:43 crc kubenswrapper[4711]: I1205 12:49:43.118996 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-26hds" event={"ID":"b3cc8b59-9e8d-4150-8234-3b56aeaa1642","Type":"ContainerStarted","Data":"2a6b2332fdb63b26d58f4df4c1178fd72ba72ada279bdbb079d46789178489f3"} Dec 05 12:49:44 crc kubenswrapper[4711]: I1205 12:49:44.129991 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-26hds" event={"ID":"b3cc8b59-9e8d-4150-8234-3b56aeaa1642","Type":"ContainerStarted","Data":"be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62"} Dec 05 12:49:45 crc kubenswrapper[4711]: I1205 12:49:45.140099 4711 generic.go:334] "Generic (PLEG): container finished" podID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerID="be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62" exitCode=0 Dec 05 12:49:45 crc kubenswrapper[4711]: I1205 12:49:45.140327 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-26hds" event={"ID":"b3cc8b59-9e8d-4150-8234-3b56aeaa1642","Type":"ContainerDied","Data":"be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62"} Dec 05 12:49:46 crc kubenswrapper[4711]: I1205 12:49:46.150938 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-26hds" event={"ID":"b3cc8b59-9e8d-4150-8234-3b56aeaa1642","Type":"ContainerStarted","Data":"2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389"} Dec 05 12:49:46 crc kubenswrapper[4711]: I1205 12:49:46.172506 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-26hds" podStartSLOduration=2.7866512610000003 podStartE2EDuration="5.172481842s" podCreationTimestamp="2025-12-05 12:49:41 +0000 UTC" firstStartedPulling="2025-12-05 12:49:43.121153512 +0000 UTC m=+2428.705475842" lastFinishedPulling="2025-12-05 12:49:45.506984093 +0000 UTC m=+2431.091306423" observedRunningTime="2025-12-05 12:49:46.168565516 +0000 UTC m=+2431.752887866" watchObservedRunningTime="2025-12-05 12:49:46.172481842 +0000 UTC m=+2431.756804172" Dec 05 12:49:47 crc kubenswrapper[4711]: I1205 12:49:47.684434 4711 scope.go:117] "RemoveContainer" 
containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:49:47 crc kubenswrapper[4711]: E1205 12:49:47.685045 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:49:51 crc kubenswrapper[4711]: I1205 12:49:51.727871 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:51 crc kubenswrapper[4711]: I1205 12:49:51.728445 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:51 crc kubenswrapper[4711]: I1205 12:49:51.772862 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:52 crc kubenswrapper[4711]: I1205 12:49:52.258640 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:52 crc kubenswrapper[4711]: I1205 12:49:52.310123 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-26hds"] Dec 05 12:49:54 crc kubenswrapper[4711]: I1205 12:49:54.223087 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-26hds" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerName="registry-server" containerID="cri-o://2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389" gracePeriod=2 Dec 05 12:49:54 crc kubenswrapper[4711]: I1205 12:49:54.859838 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:54 crc kubenswrapper[4711]: I1205 12:49:54.972832 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-utilities\") pod \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " Dec 05 12:49:54 crc kubenswrapper[4711]: I1205 12:49:54.972929 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-catalog-content\") pod \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " Dec 05 12:49:54 crc kubenswrapper[4711]: I1205 12:49:54.973302 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c56cf\" (UniqueName: \"kubernetes.io/projected/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-kube-api-access-c56cf\") pod \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\" (UID: \"b3cc8b59-9e8d-4150-8234-3b56aeaa1642\") " Dec 05 12:49:54 crc kubenswrapper[4711]: I1205 12:49:54.975091 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-utilities" (OuterVolumeSpecName: "utilities") pod "b3cc8b59-9e8d-4150-8234-3b56aeaa1642" (UID: "b3cc8b59-9e8d-4150-8234-3b56aeaa1642"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:49:54 crc kubenswrapper[4711]: I1205 12:49:54.979439 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-kube-api-access-c56cf" (OuterVolumeSpecName: "kube-api-access-c56cf") pod "b3cc8b59-9e8d-4150-8234-3b56aeaa1642" (UID: "b3cc8b59-9e8d-4150-8234-3b56aeaa1642"). InnerVolumeSpecName "kube-api-access-c56cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.022805 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3cc8b59-9e8d-4150-8234-3b56aeaa1642" (UID: "b3cc8b59-9e8d-4150-8234-3b56aeaa1642"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.076532 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c56cf\" (UniqueName: \"kubernetes.io/projected/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-kube-api-access-c56cf\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.076577 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.076589 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3cc8b59-9e8d-4150-8234-3b56aeaa1642-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.235974 4711 generic.go:334] "Generic (PLEG): container finished" podID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerID="2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389" exitCode=0 Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.236032 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-26hds" event={"ID":"b3cc8b59-9e8d-4150-8234-3b56aeaa1642","Type":"ContainerDied","Data":"2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389"} Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.236071 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-26hds" event={"ID":"b3cc8b59-9e8d-4150-8234-3b56aeaa1642","Type":"ContainerDied","Data":"2a6b2332fdb63b26d58f4df4c1178fd72ba72ada279bdbb079d46789178489f3"} Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.236093 4711 scope.go:117] "RemoveContainer" containerID="2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.236181 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-26hds" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.266575 4711 scope.go:117] "RemoveContainer" containerID="be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.271057 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-26hds"] Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.281576 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-26hds"] Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.292880 4711 scope.go:117] "RemoveContainer" containerID="ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.332858 4711 scope.go:117] "RemoveContainer" containerID="2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389" Dec 05 12:49:55 crc kubenswrapper[4711]: E1205 12:49:55.333359 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389\": container with ID starting with 2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389 not found: ID does not exist" containerID="2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.333504 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389"} err="failed to get container status \"2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389\": rpc error: code = NotFound desc = could not find container \"2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389\": container with ID starting with 2d81b5a401b8c26e1ba9984dcb1cfcc052b411271319d39e6a498efa7e764389 not found: ID does not exist" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.333590 4711 scope.go:117] "RemoveContainer" containerID="be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62" Dec 05 12:49:55 crc kubenswrapper[4711]: E1205 12:49:55.334068 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62\": container with ID starting with be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62 not found: ID does not exist" containerID="be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.334098 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62"} err="failed to get container status \"be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62\": rpc error: code = NotFound desc = could not find container \"be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62\": container with ID starting with be90141adee624d46e84fa5957979306f009dd88f1186ba86fba1ca29b6d7e62 not found: ID does not exist" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.334120 4711 scope.go:117] "RemoveContainer" containerID="ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b" Dec 05 12:49:55 crc kubenswrapper[4711]: E1205 12:49:55.334373 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b\": container with ID starting with ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b not found: ID does not exist" containerID="ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b" Dec 05 12:49:55 crc kubenswrapper[4711]: I1205 12:49:55.334500 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b"} err="failed to get container status \"ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b\": rpc error: code = NotFound desc = could not find container \"ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b\": container with ID starting with ecc55c0d5abfe559b9a94271b780efba4cdf4735d133d70b029a37e355afd51b not found: ID does not exist" Dec 05 12:49:56 crc kubenswrapper[4711]: I1205 12:49:56.705534 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" path="/var/lib/kubelet/pods/b3cc8b59-9e8d-4150-8234-3b56aeaa1642/volumes" Dec 05 12:50:00 crc kubenswrapper[4711]: I1205 12:50:00.684636 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:50:00 crc kubenswrapper[4711]: E1205 12:50:00.685322 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:50:02 crc kubenswrapper[4711]: I1205 12:50:02.824682 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-5d757fb76c-sptb4" podUID="c319c3a5-f67a-47d7-bfe3-8e874cf01471" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 05 12:50:14 crc kubenswrapper[4711]: I1205 12:50:14.683379 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:50:14 crc kubenswrapper[4711]: E1205 12:50:14.684205 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:50:25 crc kubenswrapper[4711]: I1205 12:50:25.683654 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:50:25 crc kubenswrapper[4711]: E1205 12:50:25.684429 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:50:40 crc 
kubenswrapper[4711]: I1205 12:50:40.685464 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:50:40 crc kubenswrapper[4711]: E1205 12:50:40.686352 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:50:53 crc kubenswrapper[4711]: I1205 12:50:53.683632 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:50:53 crc kubenswrapper[4711]: E1205 12:50:53.684469 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:51:05 crc kubenswrapper[4711]: I1205 12:51:05.683068 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:51:05 crc kubenswrapper[4711]: E1205 12:51:05.683783 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:51:17 crc kubenswrapper[4711]: I1205 12:51:17.683620 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:51:17 crc kubenswrapper[4711]: E1205 12:51:17.684667 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:51:29 crc kubenswrapper[4711]: I1205 12:51:29.683961 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:51:30 crc kubenswrapper[4711]: I1205 12:51:30.197687 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"992ff64589cfdbb7bcd59f9b445dad66eb14685d69d82bcf7889baf314a91bcd"} Dec 05 12:52:17 crc kubenswrapper[4711]: I1205 12:52:17.671154 4711 generic.go:334] "Generic (PLEG): container finished" podID="2edaac33-536e-4fe8-9579-236b42229841" containerID="830ab1c5434b29c0357e7ca7c6a2adebc0a7b732efcfff8ebac8032cd6237ec9" exitCode=0 Dec 05 12:52:17 crc kubenswrapper[4711]: I1205 12:52:17.671227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2" event={"ID":"2edaac33-536e-4fe8-9579-236b42229841","Type":"ContainerDied","Data":"830ab1c5434b29c0357e7ca7c6a2adebc0a7b732efcfff8ebac8032cd6237ec9"} Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.129061 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.181466 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b55f\" (UniqueName: \"kubernetes.io/projected/2edaac33-536e-4fe8-9579-236b42229841-kube-api-access-7b55f\") pod \"2edaac33-536e-4fe8-9579-236b42229841\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.181515 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-combined-ca-bundle\") pod \"2edaac33-536e-4fe8-9579-236b42229841\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.181540 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-ssh-key\") pod \"2edaac33-536e-4fe8-9579-236b42229841\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.181567 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-secret-0\") pod \"2edaac33-536e-4fe8-9579-236b42229841\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.181634 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-inventory\") pod \"2edaac33-536e-4fe8-9579-236b42229841\" (UID: \"2edaac33-536e-4fe8-9579-236b42229841\") " Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.188742 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "2edaac33-536e-4fe8-9579-236b42229841" (UID: "2edaac33-536e-4fe8-9579-236b42229841"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.190783 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2edaac33-536e-4fe8-9579-236b42229841-kube-api-access-7b55f" (OuterVolumeSpecName: "kube-api-access-7b55f") pod "2edaac33-536e-4fe8-9579-236b42229841" (UID: "2edaac33-536e-4fe8-9579-236b42229841"). InnerVolumeSpecName "kube-api-access-7b55f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.219064 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2edaac33-536e-4fe8-9579-236b42229841" (UID: "2edaac33-536e-4fe8-9579-236b42229841"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.228860 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "2edaac33-536e-4fe8-9579-236b42229841" (UID: "2edaac33-536e-4fe8-9579-236b42229841"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.234945 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-inventory" (OuterVolumeSpecName: "inventory") pod "2edaac33-536e-4fe8-9579-236b42229841" (UID: "2edaac33-536e-4fe8-9579-236b42229841"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.283658 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b55f\" (UniqueName: \"kubernetes.io/projected/2edaac33-536e-4fe8-9579-236b42229841-kube-api-access-7b55f\") on node \"crc\" DevicePath \"\"" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.283695 4711 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.283707 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.283717 4711 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.283728 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2edaac33-536e-4fe8-9579-236b42229841-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.697947 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2" event={"ID":"2edaac33-536e-4fe8-9579-236b42229841","Type":"ContainerDied","Data":"8f88f198d1c26847e7ea92b0a6fb5c05944e97c12cd76d3cca74946045db148e"} Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.698002 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f88f198d1c26847e7ea92b0a6fb5c05944e97c12cd76d3cca74946045db148e" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.698024 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.836331 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6"] Dec 05 12:52:19 crc kubenswrapper[4711]: E1205 12:52:19.837380 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerName="extract-content" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.837414 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerName="extract-content" Dec 05 12:52:19 crc kubenswrapper[4711]: E1205 12:52:19.837430 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2edaac33-536e-4fe8-9579-236b42229841" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.837439 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2edaac33-536e-4fe8-9579-236b42229841" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 12:52:19 crc kubenswrapper[4711]: E1205 12:52:19.837463 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerName="extract-utilities" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.837470 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerName="extract-utilities" Dec 05 12:52:19 crc kubenswrapper[4711]: E1205 12:52:19.837489 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerName="registry-server" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.837494 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerName="registry-server" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.837729 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2edaac33-536e-4fe8-9579-236b42229841" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.837748 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3cc8b59-9e8d-4150-8234-3b56aeaa1642" containerName="registry-server" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.838661 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.842049 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.842332 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.842657 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.842850 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.842970 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.843073 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.844227 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.849250 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6"] Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.998659 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.999016 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.999039 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.999078 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.999197 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.999287 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smbtx\" (UniqueName: \"kubernetes.io/projected/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-kube-api-access-smbtx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:19 crc kubenswrapper[4711]: I1205 12:52:19.999403 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:19.999972 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.000100 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.102021 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smbtx\" (UniqueName: \"kubernetes.io/projected/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-kube-api-access-smbtx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.102101 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.102177 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.102283 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.103372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.103625 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.103690 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.103717 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.103781 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.103821 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.109277 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.109465 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-ssh-key\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.109635 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.109882 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.110559 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.111670 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.121010 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.125897 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smbtx\" (UniqueName: \"kubernetes.io/projected/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-kube-api-access-smbtx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2zlb6\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.157463 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:52:20 crc kubenswrapper[4711]: I1205 12:52:20.783219 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6"] Dec 05 12:52:21 crc kubenswrapper[4711]: I1205 12:52:21.719852 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" event={"ID":"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144","Type":"ContainerStarted","Data":"ce803c33d006c8ac53dfe4e669709ffbe5e429f0d6082afe90c065a065a6d6cb"} Dec 05 12:52:21 crc kubenswrapper[4711]: I1205 12:52:21.720150 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" event={"ID":"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144","Type":"ContainerStarted","Data":"af450880804c48155989a776c78debc6c6bf4831f1d19607b5b83743dffeb2a0"} Dec 05 12:53:48 crc kubenswrapper[4711]: I1205 12:53:48.300770 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:53:48 crc kubenswrapper[4711]: I1205 12:53:48.301426 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:54:18 crc kubenswrapper[4711]: I1205 12:54:18.301374 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:54:18 crc kubenswrapper[4711]: I1205 12:54:18.301937 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:54:48 crc kubenswrapper[4711]: I1205 12:54:48.301148 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:54:48 crc kubenswrapper[4711]: I1205 12:54:48.302061 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:54:48 crc kubenswrapper[4711]: I1205 12:54:48.302153 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:54:48 crc kubenswrapper[4711]: I1205 12:54:48.303737 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"992ff64589cfdbb7bcd59f9b445dad66eb14685d69d82bcf7889baf314a91bcd"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:54:48 crc kubenswrapper[4711]: I1205 12:54:48.304009 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://992ff64589cfdbb7bcd59f9b445dad66eb14685d69d82bcf7889baf314a91bcd" gracePeriod=600 Dec 05 12:54:49 crc kubenswrapper[4711]: I1205 12:54:49.148657 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="992ff64589cfdbb7bcd59f9b445dad66eb14685d69d82bcf7889baf314a91bcd" exitCode=0 Dec 05 12:54:49 crc kubenswrapper[4711]: I1205 12:54:49.148830 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"992ff64589cfdbb7bcd59f9b445dad66eb14685d69d82bcf7889baf314a91bcd"} Dec 05 12:54:49 crc kubenswrapper[4711]: I1205 12:54:49.148992 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd"} Dec 05 12:54:49 crc kubenswrapper[4711]: I1205 12:54:49.149010 4711 scope.go:117] "RemoveContainer" containerID="391be7e4dbb94b23e1e253c95a62faf18bc49efea3c02ada4f63e35ded0e9a69" Dec 05 12:54:49 crc kubenswrapper[4711]: I1205 12:54:49.175070 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" podStartSLOduration=149.718050804 podStartE2EDuration="2m30.175051171s" podCreationTimestamp="2025-12-05 12:52:19 +0000 UTC" firstStartedPulling="2025-12-05 12:52:20.786970111 +0000 UTC m=+2586.371292441" lastFinishedPulling="2025-12-05 12:52:21.243970478 +0000 UTC m=+2586.828292808" observedRunningTime="2025-12-05 12:52:21.747079778 +0000 UTC m=+2587.331402098" watchObservedRunningTime="2025-12-05 12:54:49.175051171 +0000 UTC m=+2734.759373501" Dec 05 12:55:08 crc kubenswrapper[4711]: I1205 12:55:08.346258 4711 generic.go:334] "Generic (PLEG): container finished" podID="a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" containerID="ce803c33d006c8ac53dfe4e669709ffbe5e429f0d6082afe90c065a065a6d6cb" exitCode=0 Dec 05 12:55:08 crc kubenswrapper[4711]: I1205 12:55:08.346323 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" event={"ID":"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144","Type":"ContainerDied","Data":"ce803c33d006c8ac53dfe4e669709ffbe5e429f0d6082afe90c065a065a6d6cb"} Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.802666 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.925136 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-extra-config-0\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.925343 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smbtx\" (UniqueName: \"kubernetes.io/projected/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-kube-api-access-smbtx\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.925436 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-0\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.925523 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-1\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.925861 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-1\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.925899 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-inventory\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.925937 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-combined-ca-bundle\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.925978 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-0\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.926176 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-ssh-key\") pod \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\" (UID: \"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144\") " Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.944563 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.945768 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-kube-api-access-smbtx" (OuterVolumeSpecName: "kube-api-access-smbtx") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "kube-api-access-smbtx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.957494 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.957494 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.961018 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.963352 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-inventory" (OuterVolumeSpecName: "inventory") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.973176 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.976155 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:09 crc kubenswrapper[4711]: I1205 12:55:09.982123 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" (UID: "a90b9716-a1b0-4a0b-9fc8-6ca9358a9144"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.028923 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smbtx\" (UniqueName: \"kubernetes.io/projected/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-kube-api-access-smbtx\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.028952 4711 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.028961 4711 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.028973 4711 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.028981 4711 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.028992 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.029001 4711 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.029009 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.029020 4711 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a90b9716-a1b0-4a0b-9fc8-6ca9358a9144-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.369018 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" event={"ID":"a90b9716-a1b0-4a0b-9fc8-6ca9358a9144","Type":"ContainerDied","Data":"af450880804c48155989a776c78debc6c6bf4831f1d19607b5b83743dffeb2a0"} Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.369340 4711 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="af450880804c48155989a776c78debc6c6bf4831f1d19607b5b83743dffeb2a0" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.369077 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2zlb6" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.482095 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"] Dec 05 12:55:10 crc kubenswrapper[4711]: E1205 12:55:10.482550 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.482569 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.482757 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a90b9716-a1b0-4a0b-9fc8-6ca9358a9144" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.483489 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.485513 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.485746 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pgpgb" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.485946 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.487069 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.492808 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.503164 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"] Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.540039 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.540422 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9tq5\" (UniqueName: \"kubernetes.io/projected/e5532dde-9201-42f7-bc4c-4837ad84aa24-kube-api-access-w9tq5\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.540627 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.540752 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.540875 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.541062 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.541186 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.644072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.644222 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9tq5\" (UniqueName: \"kubernetes.io/projected/e5532dde-9201-42f7-bc4c-4837ad84aa24-kube-api-access-w9tq5\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.644306 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" 
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.644358 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.644433 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.644518 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.644567 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.650725 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.651523 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.651523 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.652194 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.653379 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.657844 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.660925 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9tq5\" (UniqueName: \"kubernetes.io/projected/e5532dde-9201-42f7-bc4c-4837ad84aa24-kube-api-access-w9tq5\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6flsn\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:10 crc kubenswrapper[4711]: I1205 12:55:10.815117 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:55:11 crc kubenswrapper[4711]: I1205 12:55:11.369025 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"]
Dec 05 12:55:11 crc kubenswrapper[4711]: W1205 12:55:11.386897 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5532dde_9201_42f7_bc4c_4837ad84aa24.slice/crio-76809d4358a2daedd320173d3bc19a099cbb32ff902a4a0810f8d612debcd57b WatchSource:0}: Error finding container 76809d4358a2daedd320173d3bc19a099cbb32ff902a4a0810f8d612debcd57b: Status 404 returned error can't find the container with id 76809d4358a2daedd320173d3bc19a099cbb32ff902a4a0810f8d612debcd57b
Dec 05 12:55:11 crc kubenswrapper[4711]: I1205 12:55:11.395869 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 12:55:12 crc kubenswrapper[4711]: I1205 12:55:12.390042 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" event={"ID":"e5532dde-9201-42f7-bc4c-4837ad84aa24","Type":"ContainerStarted","Data":"f3c2c21a09ee404ef8d6cecd10458e2de1d0bbdde402770ab1a0f6870acdfae8"}
Dec 05 12:55:12 crc kubenswrapper[4711]: I1205 12:55:12.391504 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" event={"ID":"e5532dde-9201-42f7-bc4c-4837ad84aa24","Type":"ContainerStarted","Data":"76809d4358a2daedd320173d3bc19a099cbb32ff902a4a0810f8d612debcd57b"}
Dec 05 12:55:12 crc kubenswrapper[4711]: I1205 12:55:12.415133 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" podStartSLOduration=2.002321542 podStartE2EDuration="2.415114688s" podCreationTimestamp="2025-12-05 12:55:10 +0000 UTC" firstStartedPulling="2025-12-05 12:55:11.392654061 +0000 UTC m=+2756.976976391" lastFinishedPulling="2025-12-05 12:55:11.805447207 +0000 UTC m=+2757.389769537" observedRunningTime="2025-12-05 12:55:12.405973073 +0000 UTC m=+2757.990295413" watchObservedRunningTime="2025-12-05 12:55:12.415114688 +0000 UTC m=+2757.999437018"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.253498 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tkzvn"]
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.255941 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.264805 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkzvn"]
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.316632 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-catalog-content\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.316937 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlxjd\" (UniqueName: \"kubernetes.io/projected/82c7a5ee-ff48-49af-ab7c-440a63d678f6-kube-api-access-jlxjd\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.317201 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-utilities\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.419195 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlxjd\" (UniqueName: \"kubernetes.io/projected/82c7a5ee-ff48-49af-ab7c-440a63d678f6-kube-api-access-jlxjd\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.419334 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-utilities\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.419517 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-catalog-content\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.420105 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-catalog-content\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.420168 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-utilities\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.446180 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlxjd\" (UniqueName: \"kubernetes.io/projected/82c7a5ee-ff48-49af-ab7c-440a63d678f6-kube-api-access-jlxjd\") pod \"redhat-marketplace-tkzvn\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") " pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:24 crc kubenswrapper[4711]: I1205 12:55:24.588968 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:25 crc kubenswrapper[4711]: I1205 12:55:25.076979 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkzvn"]
Dec 05 12:55:25 crc kubenswrapper[4711]: I1205 12:55:25.527469 4711 generic.go:334] "Generic (PLEG): container finished" podID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerID="3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f" exitCode=0
Dec 05 12:55:25 crc kubenswrapper[4711]: I1205 12:55:25.527552 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkzvn" event={"ID":"82c7a5ee-ff48-49af-ab7c-440a63d678f6","Type":"ContainerDied","Data":"3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f"}
Dec 05 12:55:25 crc kubenswrapper[4711]: I1205 12:55:25.527750 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkzvn" event={"ID":"82c7a5ee-ff48-49af-ab7c-440a63d678f6","Type":"ContainerStarted","Data":"f212277c8b64aad7b27974d2b227f49d022893fc6af7c9b171e2becd36b2f26c"}
Dec 05 12:55:26 crc kubenswrapper[4711]: I1205 12:55:26.538840 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkzvn" event={"ID":"82c7a5ee-ff48-49af-ab7c-440a63d678f6","Type":"ContainerStarted","Data":"d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a"}
Dec 05 12:55:27 crc kubenswrapper[4711]: I1205 12:55:27.552526 4711 generic.go:334] "Generic (PLEG): container finished" podID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerID="d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a" exitCode=0
Dec 05 12:55:27 crc kubenswrapper[4711]: I1205 12:55:27.552572 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkzvn" event={"ID":"82c7a5ee-ff48-49af-ab7c-440a63d678f6","Type":"ContainerDied","Data":"d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a"}
Dec 05 12:55:28 crc kubenswrapper[4711]: I1205 12:55:28.563511 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkzvn" event={"ID":"82c7a5ee-ff48-49af-ab7c-440a63d678f6","Type":"ContainerStarted","Data":"90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4"}
Dec 05 12:55:28 crc kubenswrapper[4711]: I1205 12:55:28.587276 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tkzvn" podStartSLOduration=2.178598451 podStartE2EDuration="4.587254944s" podCreationTimestamp="2025-12-05 12:55:24 +0000 UTC" firstStartedPulling="2025-12-05 12:55:25.529953401 +0000 UTC m=+2771.114275731" lastFinishedPulling="2025-12-05 12:55:27.938609894 +0000 UTC m=+2773.522932224" observedRunningTime="2025-12-05 12:55:28.579651977 +0000 UTC m=+2774.163974317" watchObservedRunningTime="2025-12-05 12:55:28.587254944 +0000 UTC m=+2774.171577274"
Dec 05 12:55:34 crc kubenswrapper[4711]: I1205 12:55:34.590369 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:34 crc kubenswrapper[4711]: I1205 12:55:34.591285 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:34 crc kubenswrapper[4711]: I1205 12:55:34.652000 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:34 crc kubenswrapper[4711]: I1205 12:55:34.718973 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:35 crc kubenswrapper[4711]: I1205 12:55:35.840467 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkzvn"]
Dec 05 12:55:36 crc kubenswrapper[4711]: I1205 12:55:36.668720 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tkzvn" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerName="registry-server" containerID="cri-o://90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4" gracePeriod=2
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.092354 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.171364 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlxjd\" (UniqueName: \"kubernetes.io/projected/82c7a5ee-ff48-49af-ab7c-440a63d678f6-kube-api-access-jlxjd\") pod \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") "
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.171728 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-catalog-content\") pod \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") "
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.171875 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-utilities\") pod \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\" (UID: \"82c7a5ee-ff48-49af-ab7c-440a63d678f6\") "
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.172777 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-utilities" (OuterVolumeSpecName: "utilities") pod "82c7a5ee-ff48-49af-ab7c-440a63d678f6" (UID: "82c7a5ee-ff48-49af-ab7c-440a63d678f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.182811 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82c7a5ee-ff48-49af-ab7c-440a63d678f6-kube-api-access-jlxjd" (OuterVolumeSpecName: "kube-api-access-jlxjd") pod "82c7a5ee-ff48-49af-ab7c-440a63d678f6" (UID: "82c7a5ee-ff48-49af-ab7c-440a63d678f6"). InnerVolumeSpecName "kube-api-access-jlxjd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.192579 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "82c7a5ee-ff48-49af-ab7c-440a63d678f6" (UID: "82c7a5ee-ff48-49af-ab7c-440a63d678f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.273940 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlxjd\" (UniqueName: \"kubernetes.io/projected/82c7a5ee-ff48-49af-ab7c-440a63d678f6-kube-api-access-jlxjd\") on node \"crc\" DevicePath \"\""
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.274001 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.274012 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82c7a5ee-ff48-49af-ab7c-440a63d678f6-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.678604 4711 generic.go:334] "Generic (PLEG): container finished" podID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerID="90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4" exitCode=0
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.678666 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkzvn" event={"ID":"82c7a5ee-ff48-49af-ab7c-440a63d678f6","Type":"ContainerDied","Data":"90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4"}
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.678690 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkzvn"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.678700 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkzvn" event={"ID":"82c7a5ee-ff48-49af-ab7c-440a63d678f6","Type":"ContainerDied","Data":"f212277c8b64aad7b27974d2b227f49d022893fc6af7c9b171e2becd36b2f26c"}
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.678766 4711 scope.go:117] "RemoveContainer" containerID="90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.708070 4711 scope.go:117] "RemoveContainer" containerID="d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.723345 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkzvn"]
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.732480 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkzvn"]
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.736113 4711 scope.go:117] "RemoveContainer" containerID="3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.786724 4711 scope.go:117] "RemoveContainer" containerID="90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4"
Dec 05 12:55:37 crc kubenswrapper[4711]: E1205 12:55:37.787735 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4\": container with ID starting with 90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4 not found: ID does not exist" containerID="90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.787782 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4"} err="failed to get container status \"90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4\": rpc error: code = NotFound desc = could not find container \"90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4\": container with ID starting with 90fef6fec8ef20476d4214b874a6fe6ce7d8078b0078d453a8b590c2910fd5b4 not found: ID does not exist"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.787813 4711 scope.go:117] "RemoveContainer" containerID="d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a"
Dec 05 12:55:37 crc kubenswrapper[4711]: E1205 12:55:37.788566 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a\": container with ID starting with d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a not found: ID does not exist" containerID="d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.788598 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a"} err="failed to get container status \"d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a\": rpc error: code = NotFound desc = could not find container \"d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a\": container with ID starting with d9ea68a4b5f896f1cbd34fb0034fd50e0ff6d58e59785ff89b8e264ccccb014a not found: ID does not exist"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.788619 4711 scope.go:117] "RemoveContainer" containerID="3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f"
Dec 05 12:55:37 crc kubenswrapper[4711]: E1205 12:55:37.788986 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f\": container with ID starting with 3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f not found: ID does not exist" containerID="3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f"
Dec 05 12:55:37 crc kubenswrapper[4711]: I1205 12:55:37.789012 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f"} err="failed to get container status \"3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f\": rpc error: code = NotFound desc = could not find container \"3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f\": container with ID starting with 3300b3a49d19d015fb8c2ccb4820a6386dd304c3914e65ab1a4774bc35fd796f not found: ID does not exist"
Dec 05 12:55:38 crc kubenswrapper[4711]: I1205 12:55:38.695256 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" path="/var/lib/kubelet/pods/82c7a5ee-ff48-49af-ab7c-440a63d678f6/volumes"
Dec 05 12:56:48 crc kubenswrapper[4711]: I1205 12:56:48.300617 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:56:48 crc kubenswrapper[4711]: I1205 12:56:48.301067 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.262035 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fvz55"]
Dec 05 12:57:17 crc kubenswrapper[4711]: E1205 12:57:17.263156 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerName="registry-server"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.263174 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerName="registry-server"
Dec 05 12:57:17 crc kubenswrapper[4711]: E1205 12:57:17.263190 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerName="extract-utilities"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.263198 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerName="extract-utilities"
Dec 05 12:57:17 crc kubenswrapper[4711]: E1205 12:57:17.263231 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerName="extract-content"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.263241 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerName="extract-content"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.263494 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="82c7a5ee-ff48-49af-ab7c-440a63d678f6" containerName="registry-server"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.265241 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.275370 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fvz55"]
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.339466 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-catalog-content\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.339724 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-utilities\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.339780 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl4fk\" (UniqueName: \"kubernetes.io/projected/3f58bc94-f372-45aa-847c-66afe194d1ee-kube-api-access-hl4fk\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.441228 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-catalog-content\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.441270 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-utilities\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.441325 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl4fk\" (UniqueName: \"kubernetes.io/projected/3f58bc94-f372-45aa-847c-66afe194d1ee-kube-api-access-hl4fk\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.441741 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-catalog-content\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.441806 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-utilities\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.462596 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl4fk\" (UniqueName: \"kubernetes.io/projected/3f58bc94-f372-45aa-847c-66afe194d1ee-kube-api-access-hl4fk\") pod \"redhat-operators-fvz55\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:17 crc kubenswrapper[4711]: I1205 12:57:17.605464 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fvz55"
Dec 05 12:57:18 crc kubenswrapper[4711]: I1205 12:57:18.118024 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fvz55"]
Dec 05 12:57:18 crc kubenswrapper[4711]: I1205 12:57:18.301098 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:57:18 crc kubenswrapper[4711]: I1205 12:57:18.301489 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:57:18 crc kubenswrapper[4711]: I1205 12:57:18.642260 4711 generic.go:334] "Generic (PLEG): container finished" podID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerID="f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8" exitCode=0
Dec 05 12:57:18 crc kubenswrapper[4711]: I1205 12:57:18.642326 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvz55" event={"ID":"3f58bc94-f372-45aa-847c-66afe194d1ee","Type":"ContainerDied","Data":"f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8"}
Dec 05 12:57:18 crc kubenswrapper[4711]: I1205 12:57:18.642377 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvz55" event={"ID":"3f58bc94-f372-45aa-847c-66afe194d1ee","Type":"ContainerStarted","Data":"525bc76ae764a68884f8bc526face88c45067d56e95361e3085b258b5c19832f"}
Dec 05 12:57:19 crc kubenswrapper[4711]: I1205 12:57:19.659070 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvz55" event={"ID":"3f58bc94-f372-45aa-847c-66afe194d1ee","Type":"ContainerStarted","Data":"8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7"}
Dec 05 12:57:20 crc kubenswrapper[4711]: E1205 12:57:20.935000 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f58bc94_f372_45aa_847c_66afe194d1ee.slice/crio-8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 12:57:22 crc kubenswrapper[4711]: I1205 12:57:22.699872 4711 generic.go:334] "Generic (PLEG): container finished" podID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerID="8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7" exitCode=0
Dec 05 12:57:22 crc kubenswrapper[4711]: I1205 12:57:22.701694 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvz55" event={"ID":"3f58bc94-f372-45aa-847c-66afe194d1ee","Type":"ContainerDied","Data":"8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7"}
Dec 05 12:57:22 crc kubenswrapper[4711]: I1205 12:57:22.701990 4711 generic.go:334] "Generic (PLEG): container finished" podID="e5532dde-9201-42f7-bc4c-4837ad84aa24" containerID="f3c2c21a09ee404ef8d6cecd10458e2de1d0bbdde402770ab1a0f6870acdfae8" exitCode=0
Dec 05 12:57:22 crc kubenswrapper[4711]: I1205 12:57:22.702056 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" event={"ID":"e5532dde-9201-42f7-bc4c-4837ad84aa24","Type":"ContainerDied","Data":"f3c2c21a09ee404ef8d6cecd10458e2de1d0bbdde402770ab1a0f6870acdfae8"}
Dec 05 12:57:23 crc kubenswrapper[4711]: I1205 12:57:23.715330 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvz55" event={"ID":"3f58bc94-f372-45aa-847c-66afe194d1ee","Type":"ContainerStarted","Data":"8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a"}
Dec 05 12:57:23 crc kubenswrapper[4711]: I1205 12:57:23.744666 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fvz55" podStartSLOduration=2.256972155 podStartE2EDuration="6.744642591s" podCreationTimestamp="2025-12-05 12:57:17 +0000 UTC" firstStartedPulling="2025-12-05 12:57:18.643797818 +0000 UTC m=+2884.228120148" lastFinishedPulling="2025-12-05 12:57:23.131468254 +0000 UTC m=+2888.715790584" observedRunningTime="2025-12-05 12:57:23.73320892 +0000 UTC m=+2889.317531260" watchObservedRunningTime="2025-12-05 12:57:23.744642591 +0000 UTC m=+2889.328964921"
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.232591 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn"
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.389132 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9tq5\" (UniqueName: \"kubernetes.io/projected/e5532dde-9201-42f7-bc4c-4837ad84aa24-kube-api-access-w9tq5\") pod \"e5532dde-9201-42f7-bc4c-4837ad84aa24\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") "
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.389189 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ssh-key\") pod \"e5532dde-9201-42f7-bc4c-4837ad84aa24\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") "
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.389265 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-0\") pod \"e5532dde-9201-42f7-bc4c-4837ad84aa24\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") "
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.389296 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-1\") pod \"e5532dde-9201-42f7-bc4c-4837ad84aa24\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") "
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.389377 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-telemetry-combined-ca-bundle\") pod \"e5532dde-9201-42f7-bc4c-4837ad84aa24\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") "
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.389469 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-inventory\") pod \"e5532dde-9201-42f7-bc4c-4837ad84aa24\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") "
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.389563 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-2\") pod \"e5532dde-9201-42f7-bc4c-4837ad84aa24\" (UID: \"e5532dde-9201-42f7-bc4c-4837ad84aa24\") "
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.398371 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "e5532dde-9201-42f7-bc4c-4837ad84aa24" (UID: "e5532dde-9201-42f7-bc4c-4837ad84aa24"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.403753 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5532dde-9201-42f7-bc4c-4837ad84aa24-kube-api-access-w9tq5" (OuterVolumeSpecName: "kube-api-access-w9tq5") pod "e5532dde-9201-42f7-bc4c-4837ad84aa24" (UID: "e5532dde-9201-42f7-bc4c-4837ad84aa24"). InnerVolumeSpecName "kube-api-access-w9tq5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.427005 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "e5532dde-9201-42f7-bc4c-4837ad84aa24" (UID: "e5532dde-9201-42f7-bc4c-4837ad84aa24"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.433701 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e5532dde-9201-42f7-bc4c-4837ad84aa24" (UID: "e5532dde-9201-42f7-bc4c-4837ad84aa24"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.437793 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "e5532dde-9201-42f7-bc4c-4837ad84aa24" (UID: "e5532dde-9201-42f7-bc4c-4837ad84aa24"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.439299 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-inventory" (OuterVolumeSpecName: "inventory") pod "e5532dde-9201-42f7-bc4c-4837ad84aa24" (UID: "e5532dde-9201-42f7-bc4c-4837ad84aa24"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.439513 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "e5532dde-9201-42f7-bc4c-4837ad84aa24" (UID: "e5532dde-9201-42f7-bc4c-4837ad84aa24"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.491445 4711 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.491490 4711 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.491508 4711 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.491527 4711 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.491539 4711 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.491551 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9tq5\" (UniqueName: \"kubernetes.io/projected/e5532dde-9201-42f7-bc4c-4837ad84aa24-kube-api-access-w9tq5\") on node \"crc\" DevicePath \"\""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.491580 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5532dde-9201-42f7-bc4c-4837ad84aa24-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.728424 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" event={"ID":"e5532dde-9201-42f7-bc4c-4837ad84aa24","Type":"ContainerDied","Data":"76809d4358a2daedd320173d3bc19a099cbb32ff902a4a0810f8d612debcd57b"}
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.728471 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76809d4358a2daedd320173d3bc19a099cbb32ff902a4a0810f8d612debcd57b"
Dec 05 12:57:24 crc kubenswrapper[4711]: I1205 12:57:24.728477 4711 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6flsn" Dec 05 12:57:27 crc kubenswrapper[4711]: I1205 12:57:27.606127 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fvz55" Dec 05 12:57:27 crc kubenswrapper[4711]: I1205 12:57:27.608175 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fvz55" Dec 05 12:57:28 crc kubenswrapper[4711]: I1205 12:57:28.663058 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fvz55" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="registry-server" probeResult="failure" output=< Dec 05 12:57:28 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 12:57:28 crc kubenswrapper[4711]: > Dec 05 12:57:37 crc kubenswrapper[4711]: I1205 12:57:37.673208 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fvz55" Dec 05 12:57:37 crc kubenswrapper[4711]: I1205 12:57:37.737860 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fvz55" Dec 05 12:57:37 crc kubenswrapper[4711]: I1205 12:57:37.917968 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fvz55"] Dec 05 12:57:38 crc kubenswrapper[4711]: I1205 12:57:38.890628 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fvz55" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="registry-server" containerID="cri-o://8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a" gracePeriod=2 Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.365663 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fvz55" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.520550 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hl4fk\" (UniqueName: \"kubernetes.io/projected/3f58bc94-f372-45aa-847c-66afe194d1ee-kube-api-access-hl4fk\") pod \"3f58bc94-f372-45aa-847c-66afe194d1ee\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.520591 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-catalog-content\") pod \"3f58bc94-f372-45aa-847c-66afe194d1ee\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.520765 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-utilities\") pod \"3f58bc94-f372-45aa-847c-66afe194d1ee\" (UID: \"3f58bc94-f372-45aa-847c-66afe194d1ee\") " Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.521555 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-utilities" (OuterVolumeSpecName: "utilities") pod "3f58bc94-f372-45aa-847c-66afe194d1ee" (UID: "3f58bc94-f372-45aa-847c-66afe194d1ee"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.526953 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f58bc94-f372-45aa-847c-66afe194d1ee-kube-api-access-hl4fk" (OuterVolumeSpecName: "kube-api-access-hl4fk") pod "3f58bc94-f372-45aa-847c-66afe194d1ee" (UID: "3f58bc94-f372-45aa-847c-66afe194d1ee"). InnerVolumeSpecName "kube-api-access-hl4fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.623664 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hl4fk\" (UniqueName: \"kubernetes.io/projected/3f58bc94-f372-45aa-847c-66afe194d1ee-kube-api-access-hl4fk\") on node \"crc\" DevicePath \"\"" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.623742 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.681057 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f58bc94-f372-45aa-847c-66afe194d1ee" (UID: "3f58bc94-f372-45aa-847c-66afe194d1ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.726097 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f58bc94-f372-45aa-847c-66afe194d1ee-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.909075 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fvz55" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.909031 4711 generic.go:334] "Generic (PLEG): container finished" podID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerID="8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a" exitCode=0 Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.909116 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvz55" event={"ID":"3f58bc94-f372-45aa-847c-66afe194d1ee","Type":"ContainerDied","Data":"8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a"} Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.910836 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvz55" event={"ID":"3f58bc94-f372-45aa-847c-66afe194d1ee","Type":"ContainerDied","Data":"525bc76ae764a68884f8bc526face88c45067d56e95361e3085b258b5c19832f"} Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.910910 4711 scope.go:117] "RemoveContainer" containerID="8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.938911 4711 scope.go:117] "RemoveContainer" containerID="8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7" Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.954444 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fvz55"] Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.962378 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fvz55"] Dec 05 12:57:39 crc kubenswrapper[4711]: I1205 12:57:39.983456 4711 scope.go:117] "RemoveContainer" containerID="f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8" Dec 05 12:57:40 crc kubenswrapper[4711]: I1205 12:57:40.050360 4711 scope.go:117] "RemoveContainer" containerID="8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a" Dec 05 12:57:40 crc kubenswrapper[4711]: E1205 12:57:40.050922 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a\": container with ID starting with 8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a not found: ID does not exist" containerID="8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a" Dec 05 12:57:40 crc kubenswrapper[4711]: I1205 12:57:40.050999 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a"} err="failed to get container status \"8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a\": rpc error: code = NotFound desc = could not find container \"8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a\": container with ID starting with 8314d818555ad8cc3ff4d4153b525656319731e5eb879b7b49ba51193c2a6e5a not found: ID does not exist" Dec 05 12:57:40 crc kubenswrapper[4711]: I1205 12:57:40.051047 4711 scope.go:117] "RemoveContainer" containerID="8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7" Dec 05 12:57:40 crc kubenswrapper[4711]: E1205 12:57:40.051538 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7\": container with ID starting with 
8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7 not found: ID does not exist" containerID="8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7" Dec 05 12:57:40 crc kubenswrapper[4711]: I1205 12:57:40.051639 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7"} err="failed to get container status \"8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7\": rpc error: code = NotFound desc = could not find container \"8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7\": container with ID starting with 8764b2e4077162f5546a361bb95d052c3b5465e1a778f4c74e595ca0631339c7 not found: ID does not exist" Dec 05 12:57:40 crc kubenswrapper[4711]: I1205 12:57:40.051724 4711 scope.go:117] "RemoveContainer" containerID="f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8" Dec 05 12:57:40 crc kubenswrapper[4711]: E1205 12:57:40.052472 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8\": container with ID starting with f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8 not found: ID does not exist" containerID="f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8" Dec 05 12:57:40 crc kubenswrapper[4711]: I1205 12:57:40.052518 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8"} err="failed to get container status \"f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8\": rpc error: code = NotFound desc = could not find container \"f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8\": container with ID starting with f3443b18fdc838dd742555674ec6a39052044e087f3359c6344446fc8bdc97b8 not found: ID does not exist" Dec 05 12:57:40 crc kubenswrapper[4711]: I1205 12:57:40.697498 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" path="/var/lib/kubelet/pods/3f58bc94-f372-45aa-847c-66afe194d1ee/volumes" Dec 05 12:57:48 crc kubenswrapper[4711]: I1205 12:57:48.300506 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:57:48 crc kubenswrapper[4711]: I1205 12:57:48.301108 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:57:48 crc kubenswrapper[4711]: I1205 12:57:48.301170 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 12:57:48 crc kubenswrapper[4711]: I1205 12:57:48.302124 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:57:48 crc kubenswrapper[4711]: I1205 12:57:48.302187 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" gracePeriod=600 Dec 05 12:57:48 crc kubenswrapper[4711]: E1205 12:57:48.431044 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:57:49 crc kubenswrapper[4711]: I1205 12:57:49.007222 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" exitCode=0 Dec 05 12:57:49 crc kubenswrapper[4711]: I1205 12:57:49.007307 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd"} Dec 05 12:57:49 crc kubenswrapper[4711]: I1205 12:57:49.007843 4711 scope.go:117] "RemoveContainer" containerID="992ff64589cfdbb7bcd59f9b445dad66eb14685d69d82bcf7889baf314a91bcd" Dec 05 12:57:49 crc kubenswrapper[4711]: I1205 12:57:49.008513 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:57:49 crc kubenswrapper[4711]: E1205 12:57:49.008819 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.387350 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 05 12:58:02 crc kubenswrapper[4711]: E1205 12:58:02.388273 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5532dde-9201-42f7-bc4c-4837ad84aa24" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.388288 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5532dde-9201-42f7-bc4c-4837ad84aa24" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:02 crc kubenswrapper[4711]: E1205 12:58:02.388306 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="extract-utilities" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.388314 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="extract-utilities" Dec 05 12:58:02 crc kubenswrapper[4711]: E1205 12:58:02.388327 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="extract-content" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.388335 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="extract-content" Dec 05 12:58:02 crc kubenswrapper[4711]: E1205 12:58:02.388562 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="registry-server" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.388575 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="registry-server" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.388851 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5532dde-9201-42f7-bc4c-4837ad84aa24" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.388880 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f58bc94-f372-45aa-847c-66afe194d1ee" containerName="registry-server" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.390169 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.394777 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.414820 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.479216 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.481869 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.483806 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-nfs-config-data" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.496985 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497051 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-config-data\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497075 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497096 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497141 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-run\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497164 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-scripts\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497216 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-lib-modules\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497240 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-dev\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497262 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-sys\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " 
pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497298 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497340 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497363 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497407 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497431 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4762\" (UniqueName: \"kubernetes.io/projected/ad25585d-a381-43fc-8254-91ed6b58e1da-kube-api-access-w4762\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.497474 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.524772 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.527609 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.535831 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-nfs-2-config-data" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.542498 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.557415 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.601954 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.602517 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-dev\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.602656 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.602689 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.603854 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604018 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604054 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604192 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " 
pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604323 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604356 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-sys\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604509 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604546 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604682 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4762\" (UniqueName: \"kubernetes.io/projected/ad25585d-a381-43fc-8254-91ed6b58e1da-kube-api-access-w4762\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604851 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604883 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-run\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604930 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604955 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605059 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" 
(UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.604599 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605501 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605543 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605574 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605598 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605645 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605670 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605691 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605714 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " 
pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605738 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605781 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605809 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-config-data\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605836 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605858 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605879 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605903 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605930 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jszr\" (UniqueName: \"kubernetes.io/projected/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-kube-api-access-9jszr\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605954 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.605988 4711 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606012 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606041 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606065 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-run\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606103 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-scripts\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606148 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-lib-modules\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606199 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606221 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-dev\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606245 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zrp9\" (UniqueName: \"kubernetes.io/projected/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-kube-api-access-8zrp9\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606270 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-run\") pod 
\"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606299 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-sys\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606421 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-sys\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.606451 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.607079 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-run\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.607097 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.607493 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.607541 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.607557 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-dev\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.607574 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad25585d-a381-43fc-8254-91ed6b58e1da-lib-modules\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.611202 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " 
pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.612193 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-scripts\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.614299 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-config-data\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.624827 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad25585d-a381-43fc-8254-91ed6b58e1da-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.626973 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4762\" (UniqueName: \"kubernetes.io/projected/ad25585d-a381-43fc-8254-91ed6b58e1da-kube-api-access-w4762\") pod \"cinder-backup-0\" (UID: \"ad25585d-a381-43fc-8254-91ed6b58e1da\") " pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.685047 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:58:02 crc kubenswrapper[4711]: E1205 12:58:02.685481 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708319 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708370 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708427 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708457 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " 
pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708462 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708487 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-sys\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708493 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708539 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708573 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-sys\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708617 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708714 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708718 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708753 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708761 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-run\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " 
pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708781 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708798 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708819 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-run\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708832 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708851 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708866 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708895 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708913 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708933 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.708953 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-nvme\") pod 
\"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709023 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709050 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709065 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709084 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jszr\" (UniqueName: \"kubernetes.io/projected/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-kube-api-access-9jszr\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709122 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709140 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709158 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709218 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 
12:58:02.709236 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zrp9\" (UniqueName: \"kubernetes.io/projected/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-kube-api-access-8zrp9\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709251 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709283 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-dev\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709307 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709813 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.709849 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.710362 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.710608 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.710671 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.710965 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " 
pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.711146 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.711273 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.711372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.711826 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.712245 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.712280 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.711316 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-dev\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.712656 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.713190 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.716127 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: 
\"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.716589 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.716936 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.718138 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.721024 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.723827 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.726595 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.731753 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zrp9\" (UniqueName: \"kubernetes.io/projected/41cbfd27-4dc3-4989-8e6d-518ebc6c95c6-kube-api-access-8zrp9\") pod \"cinder-volume-nfs-0\" (UID: \"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6\") " pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.737276 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jszr\" (UniqueName: \"kubernetes.io/projected/73f78a38-93dc-4cf3-9d10-51ff489c6aa5-kube-api-access-9jszr\") pod \"cinder-volume-nfs-2-0\" (UID: \"73f78a38-93dc-4cf3-9d10-51ff489c6aa5\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.824912 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:02 crc kubenswrapper[4711]: I1205 12:58:02.853555 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:03 crc kubenswrapper[4711]: I1205 12:58:03.280642 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 12:58:03 crc kubenswrapper[4711]: I1205 12:58:03.439246 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 05 12:58:03 crc kubenswrapper[4711]: W1205 12:58:03.455694 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41cbfd27_4dc3_4989_8e6d_518ebc6c95c6.slice/crio-e166f89fc60deabea02328366dacc901032edd12e32174b8568b8e49eb6c4ccf WatchSource:0}: Error finding container e166f89fc60deabea02328366dacc901032edd12e32174b8568b8e49eb6c4ccf: Status 404 returned error can't find the container with id e166f89fc60deabea02328366dacc901032edd12e32174b8568b8e49eb6c4ccf Dec 05 12:58:03 crc kubenswrapper[4711]: I1205 12:58:03.531868 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 05 12:58:03 crc kubenswrapper[4711]: W1205 12:58:03.541964 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73f78a38_93dc_4cf3_9d10_51ff489c6aa5.slice/crio-68ba5c76e304b560cd0c138325f45041be4b75f5626ec5bde05d08510883539c WatchSource:0}: Error finding container 68ba5c76e304b560cd0c138325f45041be4b75f5626ec5bde05d08510883539c: Status 404 returned error can't find the container with id 68ba5c76e304b560cd0c138325f45041be4b75f5626ec5bde05d08510883539c Dec 05 12:58:04 crc kubenswrapper[4711]: I1205 12:58:04.160881 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6","Type":"ContainerStarted","Data":"8db975498fd7c2bae90df334670bf04a04619026d294efa10833fb6c7e0fca43"} Dec 05 12:58:04 crc kubenswrapper[4711]: I1205 12:58:04.161222 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6","Type":"ContainerStarted","Data":"e166f89fc60deabea02328366dacc901032edd12e32174b8568b8e49eb6c4ccf"} Dec 05 12:58:04 crc kubenswrapper[4711]: I1205 12:58:04.162254 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ad25585d-a381-43fc-8254-91ed6b58e1da","Type":"ContainerStarted","Data":"3abdfcbeba6d5e3b74fe9db9150536f63ed982fb23d747f2a2658f7371bfcec6"} Dec 05 12:58:04 crc kubenswrapper[4711]: I1205 12:58:04.162298 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ad25585d-a381-43fc-8254-91ed6b58e1da","Type":"ContainerStarted","Data":"be471b8f72c56b7f57c597cbd5a2a0dac106019a1685ce771ea4be7fbfc90a2f"} Dec 05 12:58:04 crc kubenswrapper[4711]: I1205 12:58:04.164110 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"73f78a38-93dc-4cf3-9d10-51ff489c6aa5","Type":"ContainerStarted","Data":"1db5b94734b6ba4ddc53188622e7f5360dadf7cc37aa861dc78775952b175abf"} Dec 05 12:58:04 crc kubenswrapper[4711]: I1205 12:58:04.164136 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"73f78a38-93dc-4cf3-9d10-51ff489c6aa5","Type":"ContainerStarted","Data":"68ba5c76e304b560cd0c138325f45041be4b75f5626ec5bde05d08510883539c"} Dec 05 12:58:05 crc kubenswrapper[4711]: I1205 12:58:05.175948 4711 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"41cbfd27-4dc3-4989-8e6d-518ebc6c95c6","Type":"ContainerStarted","Data":"55337230c2284f2c2ce81ad1f7c3f9a46c44b4caca4d64f87ad2c78aa0e17b8a"} Dec 05 12:58:05 crc kubenswrapper[4711]: I1205 12:58:05.178456 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ad25585d-a381-43fc-8254-91ed6b58e1da","Type":"ContainerStarted","Data":"232c703ebf913e78ba00c95634fbe50753a222c5afb2f7b9e71311dca12c89fe"} Dec 05 12:58:05 crc kubenswrapper[4711]: I1205 12:58:05.180463 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"73f78a38-93dc-4cf3-9d10-51ff489c6aa5","Type":"ContainerStarted","Data":"a20cd4e0c40ff935ed35cc687746406004acb1a166f6f0a9a21027d8247e687d"} Dec 05 12:58:05 crc kubenswrapper[4711]: I1205 12:58:05.206202 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-nfs-0" podStartSLOduration=2.954422527 podStartE2EDuration="3.206180471s" podCreationTimestamp="2025-12-05 12:58:02 +0000 UTC" firstStartedPulling="2025-12-05 12:58:03.457682933 +0000 UTC m=+2929.042005263" lastFinishedPulling="2025-12-05 12:58:03.709440867 +0000 UTC m=+2929.293763207" observedRunningTime="2025-12-05 12:58:05.199099717 +0000 UTC m=+2930.783422067" watchObservedRunningTime="2025-12-05 12:58:05.206180471 +0000 UTC m=+2930.790502801" Dec 05 12:58:05 crc kubenswrapper[4711]: I1205 12:58:05.227987 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.017915612 podStartE2EDuration="3.227965638s" podCreationTimestamp="2025-12-05 12:58:02 +0000 UTC" firstStartedPulling="2025-12-05 12:58:03.28184609 +0000 UTC m=+2928.866168420" lastFinishedPulling="2025-12-05 12:58:03.491896116 +0000 UTC m=+2929.076218446" observedRunningTime="2025-12-05 12:58:05.226934133 +0000 UTC m=+2930.811256463" watchObservedRunningTime="2025-12-05 12:58:05.227965638 +0000 UTC m=+2930.812287968" Dec 05 12:58:05 crc kubenswrapper[4711]: I1205 12:58:05.256754 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-nfs-2-0" podStartSLOduration=3.087056575 podStartE2EDuration="3.256734027s" podCreationTimestamp="2025-12-05 12:58:02 +0000 UTC" firstStartedPulling="2025-12-05 12:58:03.544575424 +0000 UTC m=+2929.128897754" lastFinishedPulling="2025-12-05 12:58:03.714252876 +0000 UTC m=+2929.298575206" observedRunningTime="2025-12-05 12:58:05.255146168 +0000 UTC m=+2930.839468498" watchObservedRunningTime="2025-12-05 12:58:05.256734027 +0000 UTC m=+2930.841056357" Dec 05 12:58:07 crc kubenswrapper[4711]: I1205 12:58:07.724617 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Dec 05 12:58:07 crc kubenswrapper[4711]: I1205 12:58:07.825353 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:07 crc kubenswrapper[4711]: I1205 12:58:07.854174 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:12 crc kubenswrapper[4711]: I1205 12:58:12.904761 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 05 12:58:13 crc kubenswrapper[4711]: I1205 12:58:13.048682 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-nfs-0" Dec 05 12:58:13 crc 
kubenswrapper[4711]: I1205 12:58:13.199287 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-nfs-2-0" Dec 05 12:58:17 crc kubenswrapper[4711]: I1205 12:58:17.683480 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:58:17 crc kubenswrapper[4711]: E1205 12:58:17.684450 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:58:28 crc kubenswrapper[4711]: I1205 12:58:28.692319 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:58:28 crc kubenswrapper[4711]: E1205 12:58:28.693148 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:58:43 crc kubenswrapper[4711]: I1205 12:58:43.683982 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:58:43 crc kubenswrapper[4711]: E1205 12:58:43.684650 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.613367 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hms5w"] Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.619204 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.644644 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hms5w"] Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.737433 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-catalog-content\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.737652 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94nvp\" (UniqueName: \"kubernetes.io/projected/5265f562-81ca-47af-a8a8-1360951bb689-kube-api-access-94nvp\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.737858 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-utilities\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.839757 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-utilities\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.840294 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-utilities\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.841903 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-catalog-content\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.842425 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-catalog-content\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.842751 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94nvp\" (UniqueName: \"kubernetes.io/projected/5265f562-81ca-47af-a8a8-1360951bb689-kube-api-access-94nvp\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.877503 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-94nvp\" (UniqueName: \"kubernetes.io/projected/5265f562-81ca-47af-a8a8-1360951bb689-kube-api-access-94nvp\") pod \"certified-operators-hms5w\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:51 crc kubenswrapper[4711]: I1205 12:58:51.942232 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:58:52 crc kubenswrapper[4711]: I1205 12:58:52.421039 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hms5w"] Dec 05 12:58:52 crc kubenswrapper[4711]: I1205 12:58:52.679421 4711 generic.go:334] "Generic (PLEG): container finished" podID="5265f562-81ca-47af-a8a8-1360951bb689" containerID="b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4" exitCode=0 Dec 05 12:58:52 crc kubenswrapper[4711]: I1205 12:58:52.680683 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hms5w" event={"ID":"5265f562-81ca-47af-a8a8-1360951bb689","Type":"ContainerDied","Data":"b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4"} Dec 05 12:58:52 crc kubenswrapper[4711]: I1205 12:58:52.680777 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hms5w" event={"ID":"5265f562-81ca-47af-a8a8-1360951bb689","Type":"ContainerStarted","Data":"b1923376199e6b975de1de8471a5119de61f43c17f63cdf7acaa2fb38d804fca"} Dec 05 12:58:54 crc kubenswrapper[4711]: I1205 12:58:54.683540 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:58:54 crc kubenswrapper[4711]: E1205 12:58:54.684434 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:58:54 crc kubenswrapper[4711]: I1205 12:58:54.706189 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hms5w" event={"ID":"5265f562-81ca-47af-a8a8-1360951bb689","Type":"ContainerStarted","Data":"76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4"} Dec 05 12:58:55 crc kubenswrapper[4711]: I1205 12:58:55.717737 4711 generic.go:334] "Generic (PLEG): container finished" podID="5265f562-81ca-47af-a8a8-1360951bb689" containerID="76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4" exitCode=0 Dec 05 12:58:55 crc kubenswrapper[4711]: I1205 12:58:55.717793 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hms5w" event={"ID":"5265f562-81ca-47af-a8a8-1360951bb689","Type":"ContainerDied","Data":"76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4"} Dec 05 12:58:56 crc kubenswrapper[4711]: I1205 12:58:56.732305 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hms5w" event={"ID":"5265f562-81ca-47af-a8a8-1360951bb689","Type":"ContainerStarted","Data":"e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82"} Dec 05 12:58:56 crc kubenswrapper[4711]: I1205 12:58:56.771902 4711 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hms5w" podStartSLOduration=2.352558558 podStartE2EDuration="5.771877627s" podCreationTimestamp="2025-12-05 12:58:51 +0000 UTC" firstStartedPulling="2025-12-05 12:58:52.681979003 +0000 UTC m=+2978.266301333" lastFinishedPulling="2025-12-05 12:58:56.101298072 +0000 UTC m=+2981.685620402" observedRunningTime="2025-12-05 12:58:56.755302849 +0000 UTC m=+2982.339625179" watchObservedRunningTime="2025-12-05 12:58:56.771877627 +0000 UTC m=+2982.356199967" Dec 05 12:59:01 crc kubenswrapper[4711]: I1205 12:59:01.943068 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:59:01 crc kubenswrapper[4711]: I1205 12:59:01.944004 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:59:01 crc kubenswrapper[4711]: I1205 12:59:01.995604 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:59:02 crc kubenswrapper[4711]: I1205 12:59:02.836521 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:59:02 crc kubenswrapper[4711]: I1205 12:59:02.899059 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hms5w"] Dec 05 12:59:04 crc kubenswrapper[4711]: I1205 12:59:04.805423 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hms5w" podUID="5265f562-81ca-47af-a8a8-1360951bb689" containerName="registry-server" containerID="cri-o://e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82" gracePeriod=2 Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.355451 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.458538 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94nvp\" (UniqueName: \"kubernetes.io/projected/5265f562-81ca-47af-a8a8-1360951bb689-kube-api-access-94nvp\") pod \"5265f562-81ca-47af-a8a8-1360951bb689\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.458716 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-utilities\") pod \"5265f562-81ca-47af-a8a8-1360951bb689\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.458855 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-catalog-content\") pod \"5265f562-81ca-47af-a8a8-1360951bb689\" (UID: \"5265f562-81ca-47af-a8a8-1360951bb689\") " Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.460194 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-utilities" (OuterVolumeSpecName: "utilities") pod "5265f562-81ca-47af-a8a8-1360951bb689" (UID: "5265f562-81ca-47af-a8a8-1360951bb689"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.470478 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5265f562-81ca-47af-a8a8-1360951bb689-kube-api-access-94nvp" (OuterVolumeSpecName: "kube-api-access-94nvp") pod "5265f562-81ca-47af-a8a8-1360951bb689" (UID: "5265f562-81ca-47af-a8a8-1360951bb689"). InnerVolumeSpecName "kube-api-access-94nvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.520723 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5265f562-81ca-47af-a8a8-1360951bb689" (UID: "5265f562-81ca-47af-a8a8-1360951bb689"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.561467 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94nvp\" (UniqueName: \"kubernetes.io/projected/5265f562-81ca-47af-a8a8-1360951bb689-kube-api-access-94nvp\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.561510 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.561520 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5265f562-81ca-47af-a8a8-1360951bb689-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.817773 4711 generic.go:334] "Generic (PLEG): container finished" podID="5265f562-81ca-47af-a8a8-1360951bb689" containerID="e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82" exitCode=0 Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.817820 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hms5w" event={"ID":"5265f562-81ca-47af-a8a8-1360951bb689","Type":"ContainerDied","Data":"e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82"} Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.817866 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hms5w" event={"ID":"5265f562-81ca-47af-a8a8-1360951bb689","Type":"ContainerDied","Data":"b1923376199e6b975de1de8471a5119de61f43c17f63cdf7acaa2fb38d804fca"} Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.817893 4711 scope.go:117] "RemoveContainer" containerID="e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.817891 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hms5w" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.850552 4711 scope.go:117] "RemoveContainer" containerID="76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.874439 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hms5w"] Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.884685 4711 scope.go:117] "RemoveContainer" containerID="b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.886603 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hms5w"] Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.938364 4711 scope.go:117] "RemoveContainer" containerID="e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82" Dec 05 12:59:05 crc kubenswrapper[4711]: E1205 12:59:05.938912 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82\": container with ID starting with e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82 not found: ID does not exist" containerID="e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.938948 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82"} err="failed to get container status \"e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82\": rpc error: code = NotFound desc = could not find container \"e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82\": container with ID starting with e49c37ff5e3b471b1193de0546a5df8cc1e54ce1aea9e052a16471a407c00f82 not found: ID does not exist" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.938967 4711 scope.go:117] "RemoveContainer" containerID="76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4" Dec 05 12:59:05 crc kubenswrapper[4711]: E1205 12:59:05.939249 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4\": container with ID starting with 76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4 not found: ID does not exist" containerID="76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.939273 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4"} err="failed to get container status \"76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4\": rpc error: code = NotFound desc = could not find container \"76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4\": container with ID starting with 76f8015894a00bc2056b7af6226bbe85ebda2a0572b8c27e5e2d2efdf2fc26d4 not found: ID does not exist" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.939287 4711 scope.go:117] "RemoveContainer" containerID="b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4" Dec 05 12:59:05 crc kubenswrapper[4711]: E1205 12:59:05.939724 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4\": container with ID starting with b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4 not found: ID does not exist" containerID="b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4" Dec 05 12:59:05 crc kubenswrapper[4711]: I1205 12:59:05.939782 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4"} err="failed to get container status \"b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4\": rpc error: code = NotFound desc = could not find container \"b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4\": container with ID starting with b105412cc2b93eec238ca43706b47659e9762e204ff8cb2d0fa290985fcb3ee4 not found: ID does not exist" Dec 05 12:59:06 crc kubenswrapper[4711]: I1205 12:59:06.701797 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5265f562-81ca-47af-a8a8-1360951bb689" path="/var/lib/kubelet/pods/5265f562-81ca-47af-a8a8-1360951bb689/volumes" Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.355816 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.356093 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="prometheus" containerID="cri-o://64db569f013b01f98c7de191fc5cc3bcdd9cb12c08d29fafcb12d74fe786cd5c" gracePeriod=600 Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.356225 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="config-reloader" containerID="cri-o://9a8e0e7b788fc223ff863a7440733725f70b6ad22d09f94970b37cfc05e13032" gracePeriod=600 Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.356250 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="thanos-sidecar" containerID="cri-o://e8378c9832568aa42123949e45f327e71584d9dbf7ab03898265476e95813d8c" gracePeriod=600 Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.839284 4711 generic.go:334] "Generic (PLEG): container finished" podID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerID="e8378c9832568aa42123949e45f327e71584d9dbf7ab03898265476e95813d8c" exitCode=0 Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.839601 4711 generic.go:334] "Generic (PLEG): container finished" podID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerID="9a8e0e7b788fc223ff863a7440733725f70b6ad22d09f94970b37cfc05e13032" exitCode=0 Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.839611 4711 generic.go:334] "Generic (PLEG): container finished" podID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerID="64db569f013b01f98c7de191fc5cc3bcdd9cb12c08d29fafcb12d74fe786cd5c" exitCode=0 Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.839320 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerDied","Data":"e8378c9832568aa42123949e45f327e71584d9dbf7ab03898265476e95813d8c"} Dec 05 12:59:07 crc 
kubenswrapper[4711]: I1205 12:59:07.839650 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerDied","Data":"9a8e0e7b788fc223ff863a7440733725f70b6ad22d09f94970b37cfc05e13032"} Dec 05 12:59:07 crc kubenswrapper[4711]: I1205 12:59:07.839666 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerDied","Data":"64db569f013b01f98c7de191fc5cc3bcdd9cb12c08d29fafcb12d74fe786cd5c"} Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.379180 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.527024 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw9r5\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-kube-api-access-rw9r5\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.527645 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-thanos-prometheus-http-client-file\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.527699 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-config\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.527746 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-tls-assets\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.527790 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.527930 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f11ac32e-01b9-4424-9a1b-008f367f316e-prometheus-metric-storage-rulefiles-0\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.528321 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f11ac32e-01b9-4424-9a1b-008f367f316e-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.529785 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.529912 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.529955 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-secret-combined-ca-bundle\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.530036 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.530065 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f11ac32e-01b9-4424-9a1b-008f367f316e-config-out\") pod \"f11ac32e-01b9-4424-9a1b-008f367f316e\" (UID: \"f11ac32e-01b9-4424-9a1b-008f367f316e\") " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.530662 4711 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f11ac32e-01b9-4424-9a1b-008f367f316e-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.533983 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.534450 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "secret-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.534559 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.534690 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-kube-api-access-rw9r5" (OuterVolumeSpecName: "kube-api-access-rw9r5") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "kube-api-access-rw9r5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.535020 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.534704 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.535881 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f11ac32e-01b9-4424-9a1b-008f367f316e-config-out" (OuterVolumeSpecName: "config-out") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.535916 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-config" (OuterVolumeSpecName: "config") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.563203 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.615059 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config" (OuterVolumeSpecName: "web-config") pod "f11ac32e-01b9-4424-9a1b-008f367f316e" (UID: "f11ac32e-01b9-4424-9a1b-008f367f316e"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632797 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") on node \"crc\" " Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632824 4711 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632835 4711 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632846 4711 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f11ac32e-01b9-4424-9a1b-008f367f316e-config-out\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632855 4711 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632866 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw9r5\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-kube-api-access-rw9r5\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632875 4711 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632884 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632891 4711 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f11ac32e-01b9-4424-9a1b-008f367f316e-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.632900 4711 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f11ac32e-01b9-4424-9a1b-008f367f316e-web-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.658147 4711 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice 
STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.658327 4711 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03") on node "crc" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.690031 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:59:08 crc kubenswrapper[4711]: E1205 12:59:08.690307 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.735081 4711 reconciler_common.go:293] "Volume detached for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.851723 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f11ac32e-01b9-4424-9a1b-008f367f316e","Type":"ContainerDied","Data":"931dd710de3ecf077e7d562e76c4152d1ff1776c57a0118ad8ac0a60213c52c8"} Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.851786 4711 scope.go:117] "RemoveContainer" containerID="e8378c9832568aa42123949e45f327e71584d9dbf7ab03898265476e95813d8c" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.851819 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.884855 4711 scope.go:117] "RemoveContainer" containerID="9a8e0e7b788fc223ff863a7440733725f70b6ad22d09f94970b37cfc05e13032" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.890599 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.930426 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.936538 4711 scope.go:117] "RemoveContainer" containerID="64db569f013b01f98c7de191fc5cc3bcdd9cb12c08d29fafcb12d74fe786cd5c" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.952531 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:59:08 crc kubenswrapper[4711]: E1205 12:59:08.953286 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5265f562-81ca-47af-a8a8-1360951bb689" containerName="registry-server" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.953415 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5265f562-81ca-47af-a8a8-1360951bb689" containerName="registry-server" Dec 05 12:59:08 crc kubenswrapper[4711]: E1205 12:59:08.953507 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="thanos-sidecar" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.953609 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="thanos-sidecar" Dec 05 12:59:08 crc kubenswrapper[4711]: E1205 12:59:08.953682 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5265f562-81ca-47af-a8a8-1360951bb689" containerName="extract-content" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.953755 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5265f562-81ca-47af-a8a8-1360951bb689" containerName="extract-content" Dec 05 12:59:08 crc kubenswrapper[4711]: E1205 12:59:08.953822 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="init-config-reloader" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.953899 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="init-config-reloader" Dec 05 12:59:08 crc kubenswrapper[4711]: E1205 12:59:08.953989 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5265f562-81ca-47af-a8a8-1360951bb689" containerName="extract-utilities" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.954062 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5265f562-81ca-47af-a8a8-1360951bb689" containerName="extract-utilities" Dec 05 12:59:08 crc kubenswrapper[4711]: E1205 12:59:08.954162 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="prometheus" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.954235 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="prometheus" Dec 05 12:59:08 crc kubenswrapper[4711]: E1205 12:59:08.954312 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="config-reloader" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 
12:59:08.954402 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="config-reloader" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.954742 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="config-reloader" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.954840 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="prometheus" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.954924 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5265f562-81ca-47af-a8a8-1360951bb689" containerName="registry-server" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.954995 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" containerName="thanos-sidecar" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.960248 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.960781 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.962996 4711 scope.go:117] "RemoveContainer" containerID="ab7da21819607da2997545f525ccce52c43e6f028ae6d55066abbe2eed2cce91" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.970801 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.970888 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-jq6ps" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.970948 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.971038 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.970798 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 05 12:59:08 crc kubenswrapper[4711]: I1205 12:59:08.991666 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.041628 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.041684 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.041739 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.041766 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.041792 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.041883 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.041990 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.042071 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.042107 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.042363 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-config\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.042442 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-vj2ld\" (UniqueName: \"kubernetes.io/projected/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-kube-api-access-vj2ld\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145586 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145635 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145686 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-config\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145712 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj2ld\" (UniqueName: \"kubernetes.io/projected/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-kube-api-access-vj2ld\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145733 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145752 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145799 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145826 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 
05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145853 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145936 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.145974 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.158372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.171257 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.171537 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.173545 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.174044 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.174370 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: 
\"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.176016 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-config\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.199320 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.199374 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f9bd9950147f59c5b738a088845da7f6ecb4f45a3918a37eadb6099082c1159/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.199316 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.200733 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.210265 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj2ld\" (UniqueName: \"kubernetes.io/projected/5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6-kube-api-access-vj2ld\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.374768 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f8bffb51-1f99-467d-bb38-bce63fa75f03\") pod \"prometheus-metric-storage-0\" (UID: \"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:09 crc kubenswrapper[4711]: I1205 12:59:09.634942 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:10 crc kubenswrapper[4711]: I1205 12:59:10.186623 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:59:10 crc kubenswrapper[4711]: I1205 12:59:10.698130 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f11ac32e-01b9-4424-9a1b-008f367f316e" path="/var/lib/kubelet/pods/f11ac32e-01b9-4424-9a1b-008f367f316e/volumes" Dec 05 12:59:10 crc kubenswrapper[4711]: I1205 12:59:10.872510 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6","Type":"ContainerStarted","Data":"faf1898302794100a06487f80d4a98552592a091f3f64cc356d7de70ac3b7eb4"} Dec 05 12:59:13 crc kubenswrapper[4711]: I1205 12:59:13.898039 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6","Type":"ContainerStarted","Data":"1b3565c6610208be5bba7f0ccd6613c9fb9df7e16a06b873911a8714ecb92cf5"} Dec 05 12:59:20 crc kubenswrapper[4711]: I1205 12:59:20.686891 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:59:20 crc kubenswrapper[4711]: E1205 12:59:20.688007 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:59:21 crc kubenswrapper[4711]: I1205 12:59:21.972367 4711 generic.go:334] "Generic (PLEG): container finished" podID="5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6" containerID="1b3565c6610208be5bba7f0ccd6613c9fb9df7e16a06b873911a8714ecb92cf5" exitCode=0 Dec 05 12:59:21 crc kubenswrapper[4711]: I1205 12:59:21.972440 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6","Type":"ContainerDied","Data":"1b3565c6610208be5bba7f0ccd6613c9fb9df7e16a06b873911a8714ecb92cf5"} Dec 05 12:59:22 crc kubenswrapper[4711]: I1205 12:59:22.984670 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6","Type":"ContainerStarted","Data":"54a4532e08b23be82d294587c66b4465a6d5f166647eaea9ef5785f6f203ce74"} Dec 05 12:59:26 crc kubenswrapper[4711]: I1205 12:59:26.022926 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6","Type":"ContainerStarted","Data":"7e685d62945c929028833f646f2b07560b516c008baf7bd99705c3df1193130e"} Dec 05 12:59:26 crc kubenswrapper[4711]: I1205 12:59:26.023501 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6","Type":"ContainerStarted","Data":"f8084b006d659b4d92e66cfa78ed83d8e22b6d738276c57c927f4f42e978f08b"} Dec 05 12:59:26 crc kubenswrapper[4711]: I1205 12:59:26.056935 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=18.056913056 podStartE2EDuration="18.056913056s" 
podCreationTimestamp="2025-12-05 12:59:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:59:26.043162907 +0000 UTC m=+3011.627485257" watchObservedRunningTime="2025-12-05 12:59:26.056913056 +0000 UTC m=+3011.641235386" Dec 05 12:59:29 crc kubenswrapper[4711]: I1205 12:59:29.635589 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:32 crc kubenswrapper[4711]: I1205 12:59:32.683305 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:59:32 crc kubenswrapper[4711]: E1205 12:59:32.683874 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:59:39 crc kubenswrapper[4711]: I1205 12:59:39.635875 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:39 crc kubenswrapper[4711]: I1205 12:59:39.643721 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:40 crc kubenswrapper[4711]: I1205 12:59:40.158661 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.563071 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.564875 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.567773 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-8fbf8" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.567904 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.567908 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.568144 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.581199 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.654600 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wkt5q"] Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.656672 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.672548 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wkt5q"] Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.692168 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:59:44 crc kubenswrapper[4711]: E1205 12:59:44.692428 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.692868 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.693012 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.693055 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.693311 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.693354 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-config-data\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.693373 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.693525 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.693559 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfcd5\" (UniqueName: \"kubernetes.io/projected/745cbd1d-0e83-42d6-b6b4-b57638936898-kube-api-access-nfcd5\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.693602 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.796015 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sht7w\" (UniqueName: \"kubernetes.io/projected/6f688bd6-4ad7-4278-83b8-e401da3f6988-kube-api-access-sht7w\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.796457 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.796642 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.796762 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-config-data\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.796956 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.797078 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfcd5\" (UniqueName: \"kubernetes.io/projected/745cbd1d-0e83-42d6-b6b4-b57638936898-kube-api-access-nfcd5\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.797199 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ssh-key\") pod \"tempest-tests-tempest\" (UID: 
\"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.797580 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.797769 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-config-data\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.797855 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.807511 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-utilities\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.807671 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.807787 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-catalog-content\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.807837 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.807912 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.808552 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" 
(UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.809149 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.811981 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.812175 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.814934 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.821564 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfcd5\" (UniqueName: \"kubernetes.io/projected/745cbd1d-0e83-42d6-b6b4-b57638936898-kube-api-access-nfcd5\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.837480 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.888462 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.909430 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sht7w\" (UniqueName: \"kubernetes.io/projected/6f688bd6-4ad7-4278-83b8-e401da3f6988-kube-api-access-sht7w\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.909549 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-utilities\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.909962 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-utilities\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.910033 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-catalog-content\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.910276 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-catalog-content\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:44 crc kubenswrapper[4711]: I1205 12:59:44.930129 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sht7w\" (UniqueName: \"kubernetes.io/projected/6f688bd6-4ad7-4278-83b8-e401da3f6988-kube-api-access-sht7w\") pod \"community-operators-wkt5q\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:45 crc kubenswrapper[4711]: I1205 12:59:45.004997 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wkt5q" Dec 05 12:59:45 crc kubenswrapper[4711]: I1205 12:59:45.522727 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 12:59:45 crc kubenswrapper[4711]: I1205 12:59:45.605647 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wkt5q"] Dec 05 12:59:45 crc kubenswrapper[4711]: W1205 12:59:45.606452 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f688bd6_4ad7_4278_83b8_e401da3f6988.slice/crio-d65c6f332f319352c4fc7f9c99378051dc55d415e2cfe41fce76051409c10438 WatchSource:0}: Error finding container d65c6f332f319352c4fc7f9c99378051dc55d415e2cfe41fce76051409c10438: Status 404 returned error can't find the container with id d65c6f332f319352c4fc7f9c99378051dc55d415e2cfe41fce76051409c10438 Dec 05 12:59:46 crc kubenswrapper[4711]: I1205 12:59:46.230987 4711 generic.go:334] "Generic (PLEG): container finished" podID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerID="954a4d889fddf8d8cd8ee67ff4a10c8093b0355980fd4691218af04a78275685" exitCode=0 Dec 05 12:59:46 crc kubenswrapper[4711]: I1205 12:59:46.231066 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkt5q" event={"ID":"6f688bd6-4ad7-4278-83b8-e401da3f6988","Type":"ContainerDied","Data":"954a4d889fddf8d8cd8ee67ff4a10c8093b0355980fd4691218af04a78275685"} Dec 05 12:59:46 crc kubenswrapper[4711]: I1205 12:59:46.231537 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkt5q" event={"ID":"6f688bd6-4ad7-4278-83b8-e401da3f6988","Type":"ContainerStarted","Data":"d65c6f332f319352c4fc7f9c99378051dc55d415e2cfe41fce76051409c10438"} Dec 05 12:59:46 crc kubenswrapper[4711]: I1205 12:59:46.233356 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"745cbd1d-0e83-42d6-b6b4-b57638936898","Type":"ContainerStarted","Data":"ccc4cc273363a0a17a9e60374d46fe7a066657e560fbc8b8052b5fc4899d1d70"} Dec 05 12:59:56 crc kubenswrapper[4711]: I1205 12:59:56.684916 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 12:59:56 crc kubenswrapper[4711]: E1205 12:59:56.685872 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 12:59:57 crc kubenswrapper[4711]: I1205 12:59:57.374345 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkt5q" event={"ID":"6f688bd6-4ad7-4278-83b8-e401da3f6988","Type":"ContainerStarted","Data":"d8238a3f75406981419197ee579ae1ef420910a0e0fd6b16bfb9b692fab0c809"} Dec 05 12:59:57 crc kubenswrapper[4711]: I1205 12:59:57.377201 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"745cbd1d-0e83-42d6-b6b4-b57638936898","Type":"ContainerStarted","Data":"757264c72003b338c898682caf7fc892bd46721c14e4c9bfe7dc4e8297114dd1"} Dec 05 12:59:57 crc kubenswrapper[4711]: I1205 12:59:57.424642 4711 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.425165068 podStartE2EDuration="14.424620837s" podCreationTimestamp="2025-12-05 12:59:43 +0000 UTC" firstStartedPulling="2025-12-05 12:59:45.526606054 +0000 UTC m=+3031.110928384" lastFinishedPulling="2025-12-05 12:59:56.526061823 +0000 UTC m=+3042.110384153" observedRunningTime="2025-12-05 12:59:57.413529203 +0000 UTC m=+3042.997851553" watchObservedRunningTime="2025-12-05 12:59:57.424620837 +0000 UTC m=+3043.008943167" Dec 05 12:59:58 crc kubenswrapper[4711]: I1205 12:59:58.388273 4711 generic.go:334] "Generic (PLEG): container finished" podID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerID="d8238a3f75406981419197ee579ae1ef420910a0e0fd6b16bfb9b692fab0c809" exitCode=0 Dec 05 12:59:58 crc kubenswrapper[4711]: I1205 12:59:58.388366 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkt5q" event={"ID":"6f688bd6-4ad7-4278-83b8-e401da3f6988","Type":"ContainerDied","Data":"d8238a3f75406981419197ee579ae1ef420910a0e0fd6b16bfb9b692fab0c809"} Dec 05 12:59:59 crc kubenswrapper[4711]: I1205 12:59:59.402770 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkt5q" event={"ID":"6f688bd6-4ad7-4278-83b8-e401da3f6988","Type":"ContainerStarted","Data":"a00ffc41f8dfd0e8b598d7b8312f1dbdeae6068e7088d88fd31b2d05da2d5939"} Dec 05 12:59:59 crc kubenswrapper[4711]: I1205 12:59:59.425497 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wkt5q" podStartSLOduration=2.874577033 podStartE2EDuration="15.425469794s" podCreationTimestamp="2025-12-05 12:59:44 +0000 UTC" firstStartedPulling="2025-12-05 12:59:46.232816818 +0000 UTC m=+3031.817139148" lastFinishedPulling="2025-12-05 12:59:58.783709579 +0000 UTC m=+3044.368031909" observedRunningTime="2025-12-05 12:59:59.423320451 +0000 UTC m=+3045.007642771" watchObservedRunningTime="2025-12-05 12:59:59.425469794 +0000 UTC m=+3045.009792124" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.159114 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65"] Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.160668 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.163745 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.166060 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.168025 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65"] Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.270429 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff8dk\" (UniqueName: \"kubernetes.io/projected/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-kube-api-access-ff8dk\") pod \"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.270566 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-config-volume\") pod \"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.270633 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-secret-volume\") pod \"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.373209 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff8dk\" (UniqueName: \"kubernetes.io/projected/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-kube-api-access-ff8dk\") pod \"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.373779 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-config-volume\") pod \"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.374018 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-secret-volume\") pod \"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.374779 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-config-volume\") pod 
\"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.387529 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-secret-volume\") pod \"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.390320 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff8dk\" (UniqueName: \"kubernetes.io/projected/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-kube-api-access-ff8dk\") pod \"collect-profiles-29415660-tfr65\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:00 crc kubenswrapper[4711]: I1205 13:00:00.490098 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:01 crc kubenswrapper[4711]: I1205 13:00:01.497203 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65"] Dec 05 13:00:01 crc kubenswrapper[4711]: W1205 13:00:01.510594 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d00bbe6_a8a7_473c_83a8_f2bf0ca0f076.slice/crio-8b0f1ce1ff121e10dfc87ec14162820f060aef5301678c57c6327a7b4a0d5e87 WatchSource:0}: Error finding container 8b0f1ce1ff121e10dfc87ec14162820f060aef5301678c57c6327a7b4a0d5e87: Status 404 returned error can't find the container with id 8b0f1ce1ff121e10dfc87ec14162820f060aef5301678c57c6327a7b4a0d5e87 Dec 05 13:00:02 crc kubenswrapper[4711]: I1205 13:00:02.435885 4711 generic.go:334] "Generic (PLEG): container finished" podID="8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076" containerID="b8485a33e4d4bff7394546690a4d5b7aa35c3343afd829abeece65a8825b5049" exitCode=0 Dec 05 13:00:02 crc kubenswrapper[4711]: I1205 13:00:02.436013 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" event={"ID":"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076","Type":"ContainerDied","Data":"b8485a33e4d4bff7394546690a4d5b7aa35c3343afd829abeece65a8825b5049"} Dec 05 13:00:02 crc kubenswrapper[4711]: I1205 13:00:02.436237 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" event={"ID":"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076","Type":"ContainerStarted","Data":"8b0f1ce1ff121e10dfc87ec14162820f060aef5301678c57c6327a7b4a0d5e87"} Dec 05 13:00:03 crc kubenswrapper[4711]: I1205 13:00:03.803187 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:03 crc kubenswrapper[4711]: I1205 13:00:03.946593 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-secret-volume\") pod \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " Dec 05 13:00:03 crc kubenswrapper[4711]: I1205 13:00:03.946755 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff8dk\" (UniqueName: \"kubernetes.io/projected/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-kube-api-access-ff8dk\") pod \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " Dec 05 13:00:03 crc kubenswrapper[4711]: I1205 13:00:03.946843 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-config-volume\") pod \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\" (UID: \"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076\") " Dec 05 13:00:03 crc kubenswrapper[4711]: I1205 13:00:03.947303 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-config-volume" (OuterVolumeSpecName: "config-volume") pod "8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076" (UID: "8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:00:03 crc kubenswrapper[4711]: I1205 13:00:03.947995 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:03 crc kubenswrapper[4711]: I1205 13:00:03.957378 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076" (UID: "8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:00:03 crc kubenswrapper[4711]: I1205 13:00:03.962679 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-kube-api-access-ff8dk" (OuterVolumeSpecName: "kube-api-access-ff8dk") pod "8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076" (UID: "8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076"). InnerVolumeSpecName "kube-api-access-ff8dk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:00:04 crc kubenswrapper[4711]: I1205 13:00:04.050450 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:04 crc kubenswrapper[4711]: I1205 13:00:04.050515 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff8dk\" (UniqueName: \"kubernetes.io/projected/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076-kube-api-access-ff8dk\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:04 crc kubenswrapper[4711]: I1205 13:00:04.456496 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" event={"ID":"8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076","Type":"ContainerDied","Data":"8b0f1ce1ff121e10dfc87ec14162820f060aef5301678c57c6327a7b4a0d5e87"} Dec 05 13:00:04 crc kubenswrapper[4711]: I1205 13:00:04.456844 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b0f1ce1ff121e10dfc87ec14162820f060aef5301678c57c6327a7b4a0d5e87" Dec 05 13:00:04 crc kubenswrapper[4711]: I1205 13:00:04.456561 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65" Dec 05 13:00:04 crc kubenswrapper[4711]: I1205 13:00:04.883449 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw"] Dec 05 13:00:04 crc kubenswrapper[4711]: I1205 13:00:04.891736 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-b9zcw"] Dec 05 13:00:05 crc kubenswrapper[4711]: I1205 13:00:05.005409 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wkt5q" Dec 05 13:00:05 crc kubenswrapper[4711]: I1205 13:00:05.005454 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wkt5q" Dec 05 13:00:05 crc kubenswrapper[4711]: I1205 13:00:05.056780 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wkt5q" Dec 05 13:00:05 crc kubenswrapper[4711]: I1205 13:00:05.524441 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wkt5q" Dec 05 13:00:05 crc kubenswrapper[4711]: I1205 13:00:05.575647 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wkt5q"] Dec 05 13:00:06 crc kubenswrapper[4711]: I1205 13:00:06.699328 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c05e59f9-7440-4bce-8767-34085269a17f" path="/var/lib/kubelet/pods/c05e59f9-7440-4bce-8767-34085269a17f/volumes" Dec 05 13:00:07 crc kubenswrapper[4711]: I1205 13:00:07.485926 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wkt5q" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerName="registry-server" containerID="cri-o://a00ffc41f8dfd0e8b598d7b8312f1dbdeae6068e7088d88fd31b2d05da2d5939" gracePeriod=2 Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.502302 4711 generic.go:334] "Generic (PLEG): container finished" podID="6f688bd6-4ad7-4278-83b8-e401da3f6988" 
containerID="a00ffc41f8dfd0e8b598d7b8312f1dbdeae6068e7088d88fd31b2d05da2d5939" exitCode=0 Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.502376 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkt5q" event={"ID":"6f688bd6-4ad7-4278-83b8-e401da3f6988","Type":"ContainerDied","Data":"a00ffc41f8dfd0e8b598d7b8312f1dbdeae6068e7088d88fd31b2d05da2d5939"} Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.502977 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wkt5q" event={"ID":"6f688bd6-4ad7-4278-83b8-e401da3f6988","Type":"ContainerDied","Data":"d65c6f332f319352c4fc7f9c99378051dc55d415e2cfe41fce76051409c10438"} Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.502997 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d65c6f332f319352c4fc7f9c99378051dc55d415e2cfe41fce76051409c10438" Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.521805 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wkt5q" Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.646752 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sht7w\" (UniqueName: \"kubernetes.io/projected/6f688bd6-4ad7-4278-83b8-e401da3f6988-kube-api-access-sht7w\") pod \"6f688bd6-4ad7-4278-83b8-e401da3f6988\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.646977 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-catalog-content\") pod \"6f688bd6-4ad7-4278-83b8-e401da3f6988\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.647051 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-utilities\") pod \"6f688bd6-4ad7-4278-83b8-e401da3f6988\" (UID: \"6f688bd6-4ad7-4278-83b8-e401da3f6988\") " Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.647947 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-utilities" (OuterVolumeSpecName: "utilities") pod "6f688bd6-4ad7-4278-83b8-e401da3f6988" (UID: "6f688bd6-4ad7-4278-83b8-e401da3f6988"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.656665 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f688bd6-4ad7-4278-83b8-e401da3f6988-kube-api-access-sht7w" (OuterVolumeSpecName: "kube-api-access-sht7w") pod "6f688bd6-4ad7-4278-83b8-e401da3f6988" (UID: "6f688bd6-4ad7-4278-83b8-e401da3f6988"). InnerVolumeSpecName "kube-api-access-sht7w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.689471 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:00:08 crc kubenswrapper[4711]: E1205 13:00:08.689821 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.707960 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f688bd6-4ad7-4278-83b8-e401da3f6988" (UID: "6f688bd6-4ad7-4278-83b8-e401da3f6988"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.748908 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sht7w\" (UniqueName: \"kubernetes.io/projected/6f688bd6-4ad7-4278-83b8-e401da3f6988-kube-api-access-sht7w\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.748950 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:08 crc kubenswrapper[4711]: I1205 13:00:08.748965 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f688bd6-4ad7-4278-83b8-e401da3f6988-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:09 crc kubenswrapper[4711]: I1205 13:00:09.511259 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wkt5q" Dec 05 13:00:09 crc kubenswrapper[4711]: I1205 13:00:09.549978 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wkt5q"] Dec 05 13:00:09 crc kubenswrapper[4711]: I1205 13:00:09.560137 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wkt5q"] Dec 05 13:00:10 crc kubenswrapper[4711]: I1205 13:00:10.695190 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" path="/var/lib/kubelet/pods/6f688bd6-4ad7-4278-83b8-e401da3f6988/volumes" Dec 05 13:00:19 crc kubenswrapper[4711]: I1205 13:00:19.683473 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:00:19 crc kubenswrapper[4711]: E1205 13:00:19.684287 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:00:30 crc kubenswrapper[4711]: I1205 13:00:30.803648 4711 scope.go:117] "RemoveContainer" containerID="7882ec2094167fdbdcf01ed01d1559868993309656d9d9f86e571facffce244f" Dec 05 13:00:34 crc kubenswrapper[4711]: I1205 13:00:34.683852 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:00:34 crc kubenswrapper[4711]: E1205 13:00:34.685641 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:00:47 crc kubenswrapper[4711]: I1205 13:00:47.684403 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:00:47 crc kubenswrapper[4711]: E1205 13:00:47.685607 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:00:58 crc kubenswrapper[4711]: I1205 13:00:58.698712 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:00:58 crc kubenswrapper[4711]: E1205 13:00:58.699973 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 
13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.163845 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415661-qkrkm"] Dec 05 13:01:00 crc kubenswrapper[4711]: E1205 13:01:00.164659 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerName="extract-utilities" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.164681 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerName="extract-utilities" Dec 05 13:01:00 crc kubenswrapper[4711]: E1205 13:01:00.164698 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerName="registry-server" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.164706 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerName="registry-server" Dec 05 13:01:00 crc kubenswrapper[4711]: E1205 13:01:00.164737 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerName="extract-content" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.164746 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerName="extract-content" Dec 05 13:01:00 crc kubenswrapper[4711]: E1205 13:01:00.164759 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076" containerName="collect-profiles" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.164766 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076" containerName="collect-profiles" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.164986 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f688bd6-4ad7-4278-83b8-e401da3f6988" containerName="registry-server" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.165026 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076" containerName="collect-profiles" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.166262 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.179247 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415661-qkrkm"] Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.321741 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65qfs\" (UniqueName: \"kubernetes.io/projected/80663af4-fba3-4f84-b9cf-687b8f08501a-kube-api-access-65qfs\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.322072 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-config-data\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.322097 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-fernet-keys\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.322141 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-combined-ca-bundle\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.424100 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-config-data\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.424152 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-fernet-keys\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.424216 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-combined-ca-bundle\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.424334 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65qfs\" (UniqueName: \"kubernetes.io/projected/80663af4-fba3-4f84-b9cf-687b8f08501a-kube-api-access-65qfs\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.431599 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-fernet-keys\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.441068 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-config-data\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.446239 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-combined-ca-bundle\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.453122 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65qfs\" (UniqueName: \"kubernetes.io/projected/80663af4-fba3-4f84-b9cf-687b8f08501a-kube-api-access-65qfs\") pod \"keystone-cron-29415661-qkrkm\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.494339 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:00 crc kubenswrapper[4711]: I1205 13:01:00.990240 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415661-qkrkm"] Dec 05 13:01:01 crc kubenswrapper[4711]: I1205 13:01:01.039549 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415661-qkrkm" event={"ID":"80663af4-fba3-4f84-b9cf-687b8f08501a","Type":"ContainerStarted","Data":"25d213b04333c678cc7b485cb7c76e3da50e3aef4ce47d86b535ed055189a296"} Dec 05 13:01:02 crc kubenswrapper[4711]: I1205 13:01:02.052171 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415661-qkrkm" event={"ID":"80663af4-fba3-4f84-b9cf-687b8f08501a","Type":"ContainerStarted","Data":"4237b79b105086d0a587023d671b83606fb21a0e4bdd8d7757fa16c38a37f542"} Dec 05 13:01:02 crc kubenswrapper[4711]: I1205 13:01:02.073185 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415661-qkrkm" podStartSLOduration=2.073159752 podStartE2EDuration="2.073159752s" podCreationTimestamp="2025-12-05 13:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 13:01:02.068448506 +0000 UTC m=+3107.652770856" watchObservedRunningTime="2025-12-05 13:01:02.073159752 +0000 UTC m=+3107.657482082" Dec 05 13:01:05 crc kubenswrapper[4711]: I1205 13:01:05.080499 4711 generic.go:334] "Generic (PLEG): container finished" podID="80663af4-fba3-4f84-b9cf-687b8f08501a" containerID="4237b79b105086d0a587023d671b83606fb21a0e4bdd8d7757fa16c38a37f542" exitCode=0 Dec 05 13:01:05 crc kubenswrapper[4711]: I1205 13:01:05.081051 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415661-qkrkm" event={"ID":"80663af4-fba3-4f84-b9cf-687b8f08501a","Type":"ContainerDied","Data":"4237b79b105086d0a587023d671b83606fb21a0e4bdd8d7757fa16c38a37f542"} Dec 05 13:01:06 crc kubenswrapper[4711]: 
I1205 13:01:06.460620 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.558793 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-combined-ca-bundle\") pod \"80663af4-fba3-4f84-b9cf-687b8f08501a\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.558897 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-fernet-keys\") pod \"80663af4-fba3-4f84-b9cf-687b8f08501a\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.559046 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-config-data\") pod \"80663af4-fba3-4f84-b9cf-687b8f08501a\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.559180 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65qfs\" (UniqueName: \"kubernetes.io/projected/80663af4-fba3-4f84-b9cf-687b8f08501a-kube-api-access-65qfs\") pod \"80663af4-fba3-4f84-b9cf-687b8f08501a\" (UID: \"80663af4-fba3-4f84-b9cf-687b8f08501a\") " Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.571566 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "80663af4-fba3-4f84-b9cf-687b8f08501a" (UID: "80663af4-fba3-4f84-b9cf-687b8f08501a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.576693 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80663af4-fba3-4f84-b9cf-687b8f08501a-kube-api-access-65qfs" (OuterVolumeSpecName: "kube-api-access-65qfs") pod "80663af4-fba3-4f84-b9cf-687b8f08501a" (UID: "80663af4-fba3-4f84-b9cf-687b8f08501a"). InnerVolumeSpecName "kube-api-access-65qfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.591639 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80663af4-fba3-4f84-b9cf-687b8f08501a" (UID: "80663af4-fba3-4f84-b9cf-687b8f08501a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.620251 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-config-data" (OuterVolumeSpecName: "config-data") pod "80663af4-fba3-4f84-b9cf-687b8f08501a" (UID: "80663af4-fba3-4f84-b9cf-687b8f08501a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.662014 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65qfs\" (UniqueName: \"kubernetes.io/projected/80663af4-fba3-4f84-b9cf-687b8f08501a-kube-api-access-65qfs\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.662055 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.662067 4711 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:06 crc kubenswrapper[4711]: I1205 13:01:06.662104 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80663af4-fba3-4f84-b9cf-687b8f08501a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:06 crc kubenswrapper[4711]: E1205 13:01:06.906339 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80663af4_fba3_4f84_b9cf_687b8f08501a.slice\": RecentStats: unable to find data in memory cache]" Dec 05 13:01:07 crc kubenswrapper[4711]: I1205 13:01:07.099743 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415661-qkrkm" event={"ID":"80663af4-fba3-4f84-b9cf-687b8f08501a","Type":"ContainerDied","Data":"25d213b04333c678cc7b485cb7c76e3da50e3aef4ce47d86b535ed055189a296"} Dec 05 13:01:07 crc kubenswrapper[4711]: I1205 13:01:07.099786 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25d213b04333c678cc7b485cb7c76e3da50e3aef4ce47d86b535ed055189a296" Dec 05 13:01:07 crc kubenswrapper[4711]: I1205 13:01:07.099828 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415661-qkrkm" Dec 05 13:01:09 crc kubenswrapper[4711]: I1205 13:01:09.682930 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:01:09 crc kubenswrapper[4711]: E1205 13:01:09.683601 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:01:22 crc kubenswrapper[4711]: I1205 13:01:22.683776 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:01:22 crc kubenswrapper[4711]: E1205 13:01:22.684691 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:01:33 crc kubenswrapper[4711]: I1205 13:01:33.684056 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:01:33 crc kubenswrapper[4711]: E1205 13:01:33.684788 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:01:45 crc kubenswrapper[4711]: I1205 13:01:45.683529 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:01:45 crc kubenswrapper[4711]: E1205 13:01:45.684527 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:01:59 crc kubenswrapper[4711]: I1205 13:01:59.683300 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:01:59 crc kubenswrapper[4711]: E1205 13:01:59.684228 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:02:10 crc kubenswrapper[4711]: I1205 13:02:10.684055 4711 scope.go:117] "RemoveContainer" 
containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:02:10 crc kubenswrapper[4711]: E1205 13:02:10.684924 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:02:24 crc kubenswrapper[4711]: I1205 13:02:24.683446 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:02:24 crc kubenswrapper[4711]: E1205 13:02:24.684357 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:02:37 crc kubenswrapper[4711]: I1205 13:02:37.684264 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:02:37 crc kubenswrapper[4711]: E1205 13:02:37.685251 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:02:49 crc kubenswrapper[4711]: I1205 13:02:49.682985 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd" Dec 05 13:02:50 crc kubenswrapper[4711]: I1205 13:02:50.130512 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"d5ce77d61c9b7e1965189bc5c054fe0c804f2c2909089dad9e2d749d543bd1ad"} Dec 05 13:05:18 crc kubenswrapper[4711]: I1205 13:05:18.300876 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:05:18 crc kubenswrapper[4711]: I1205 13:05:18.301518 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:05:48 crc kubenswrapper[4711]: I1205 13:05:48.300928 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:05:48 crc 
Dec 05 13:05:48 crc kubenswrapper[4711]: I1205 13:05:48.301546 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.458322 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l6jl9"]
Dec 05 13:06:17 crc kubenswrapper[4711]: E1205 13:06:17.459308 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80663af4-fba3-4f84-b9cf-687b8f08501a" containerName="keystone-cron"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.459322 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="80663af4-fba3-4f84-b9cf-687b8f08501a" containerName="keystone-cron"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.459564 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="80663af4-fba3-4f84-b9cf-687b8f08501a" containerName="keystone-cron"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.460976 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.475312 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6jl9"]
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.591780 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-catalog-content\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.592081 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8njj\" (UniqueName: \"kubernetes.io/projected/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-kube-api-access-g8njj\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.592453 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-utilities\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.693963 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8njj\" (UniqueName: \"kubernetes.io/projected/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-kube-api-access-g8njj\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.694101 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-utilities\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.694187 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-catalog-content\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.694697 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-catalog-content\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.694694 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-utilities\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.713610 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8njj\" (UniqueName: \"kubernetes.io/projected/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-kube-api-access-g8njj\") pod \"redhat-marketplace-l6jl9\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:17 crc kubenswrapper[4711]: I1205 13:06:17.778571 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.300562 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.300905 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.300950 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt"
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.301749 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5ce77d61c9b7e1965189bc5c054fe0c804f2c2909089dad9e2d749d543bd1ad"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.301809 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://d5ce77d61c9b7e1965189bc5c054fe0c804f2c2909089dad9e2d749d543bd1ad" gracePeriod=600
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.323433 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6jl9"]
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.450006 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6jl9" event={"ID":"1e5ab933-3c82-424c-9d8f-0a9fe67e7610","Type":"ContainerStarted","Data":"630721f71bbd65a012e38304a201cbf6457021bfde4aacdcc6ea3097cd98f962"}
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.453448 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="d5ce77d61c9b7e1965189bc5c054fe0c804f2c2909089dad9e2d749d543bd1ad" exitCode=0
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.453497 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"d5ce77d61c9b7e1965189bc5c054fe0c804f2c2909089dad9e2d749d543bd1ad"}
Dec 05 13:06:18 crc kubenswrapper[4711]: I1205 13:06:18.453535 4711 scope.go:117] "RemoveContainer" containerID="2b5144d9c3af6d6530dac6d59b5dd25a289138499b06025e2772fc57ea915bfd"
Dec 05 13:06:19 crc kubenswrapper[4711]: I1205 13:06:19.467864 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9"}
Dec 05 13:06:19 crc kubenswrapper[4711]: I1205 13:06:19.470941 4711 generic.go:334] "Generic (PLEG): container finished" podID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerID="3d09fd5ea1751146e9584853c81dc3fb599445c6356d2731c6e1084c190c5251" exitCode=0
Dec 05 13:06:19 crc kubenswrapper[4711]: I1205 13:06:19.470978 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6jl9" event={"ID":"1e5ab933-3c82-424c-9d8f-0a9fe67e7610","Type":"ContainerDied","Data":"3d09fd5ea1751146e9584853c81dc3fb599445c6356d2731c6e1084c190c5251"}
Dec 05 13:06:19 crc kubenswrapper[4711]: I1205 13:06:19.474073 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 13:06:21 crc kubenswrapper[4711]: I1205 13:06:21.492044 4711 generic.go:334] "Generic (PLEG): container finished" podID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerID="19a17ffedf46f4a6db1f5dce135ca197624db948838845733c35241466a007f0" exitCode=0
Dec 05 13:06:21 crc kubenswrapper[4711]: I1205 13:06:21.492135 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6jl9" event={"ID":"1e5ab933-3c82-424c-9d8f-0a9fe67e7610","Type":"ContainerDied","Data":"19a17ffedf46f4a6db1f5dce135ca197624db948838845733c35241466a007f0"}
Dec 05 13:06:22 crc kubenswrapper[4711]: I1205 13:06:22.504713 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6jl9" event={"ID":"1e5ab933-3c82-424c-9d8f-0a9fe67e7610","Type":"ContainerStarted","Data":"3481b4660bb6f0a40b4b50ff9412b79c3a94971810af7a3bec028b37465cfa7c"}
Dec 05 13:06:22 crc kubenswrapper[4711]: I1205 13:06:22.524735 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l6jl9" podStartSLOduration=3.048275792 podStartE2EDuration="5.524716964s" podCreationTimestamp="2025-12-05 13:06:17 +0000 UTC" firstStartedPulling="2025-12-05 13:06:19.473631535 +0000 UTC m=+3425.057953865" lastFinishedPulling="2025-12-05 13:06:21.950072707 +0000 UTC m=+3427.534395037" observedRunningTime="2025-12-05 13:06:22.522722504 +0000 UTC m=+3428.107044834" watchObservedRunningTime="2025-12-05 13:06:22.524716964 +0000 UTC m=+3428.109039294"
Dec 05 13:06:27 crc kubenswrapper[4711]: I1205 13:06:27.780371 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:27 crc kubenswrapper[4711]: I1205 13:06:27.781166 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:27 crc kubenswrapper[4711]: I1205 13:06:27.833108 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:28 crc kubenswrapper[4711]: I1205 13:06:28.607907 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l6jl9"
Dec 05 13:06:28 crc kubenswrapper[4711]: I1205 13:06:28.659609 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6jl9"]
Dec 05 13:06:30 crc kubenswrapper[4711]: I1205 13:06:30.574956 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l6jl9" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerName="registry-server" containerID="cri-o://3481b4660bb6f0a40b4b50ff9412b79c3a94971810af7a3bec028b37465cfa7c" gracePeriod=2
Dec 05 13:06:30 crc kubenswrapper[4711]: I1205 13:06:30.958353 4711 scope.go:117] "RemoveContainer" containerID="954a4d889fddf8d8cd8ee67ff4a10c8093b0355980fd4691218af04a78275685"
Dec 05 13:06:31 crc kubenswrapper[4711]: I1205 13:06:31.006441 4711 scope.go:117] "RemoveContainer" containerID="a00ffc41f8dfd0e8b598d7b8312f1dbdeae6068e7088d88fd31b2d05da2d5939"
Dec 05 13:06:31 crc kubenswrapper[4711]: I1205 13:06:31.028730 4711 scope.go:117] "RemoveContainer" containerID="d8238a3f75406981419197ee579ae1ef420910a0e0fd6b16bfb9b692fab0c809"
Dec 05 13:06:31 crc kubenswrapper[4711]: I1205 13:06:31.585697 4711 generic.go:334] "Generic (PLEG): container finished" podID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerID="3481b4660bb6f0a40b4b50ff9412b79c3a94971810af7a3bec028b37465cfa7c" exitCode=0
Dec 05 13:06:31 crc kubenswrapper[4711]: I1205 13:06:31.585736 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6jl9" event={"ID":"1e5ab933-3c82-424c-9d8f-0a9fe67e7610","Type":"ContainerDied","Data":"3481b4660bb6f0a40b4b50ff9412b79c3a94971810af7a3bec028b37465cfa7c"}
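The liveness failures above all have the same shape: a GET to http://127.0.0.1:8798/health on the node is refused at the TCP layer, which kubelet records as a probe failure and, after the failure threshold, answers by killing the container (the gracePeriod=600 kill at 13:06:18). A minimal Go stand-in for that check, useful when reproducing the failure by hand on the node (an illustration only, not kubelet's prober; the URL is taken verbatim from the log):

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	// A refused connection surfaces as an error from Get, matching the
	// "dial tcp 127.0.0.1:8798: connect: connection refused" output above.
	client := &http.Client{Timeout: 5 * time.Second}
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		fmt.Println("probe failure:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("probe result:", resp.Status)
}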
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l6jl9" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.344458 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8njj\" (UniqueName: \"kubernetes.io/projected/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-kube-api-access-g8njj\") pod \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.344858 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-utilities\") pod \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.344997 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-catalog-content\") pod \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\" (UID: \"1e5ab933-3c82-424c-9d8f-0a9fe67e7610\") " Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.354378 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-utilities" (OuterVolumeSpecName: "utilities") pod "1e5ab933-3c82-424c-9d8f-0a9fe67e7610" (UID: "1e5ab933-3c82-424c-9d8f-0a9fe67e7610"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.356211 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-kube-api-access-g8njj" (OuterVolumeSpecName: "kube-api-access-g8njj") pod "1e5ab933-3c82-424c-9d8f-0a9fe67e7610" (UID: "1e5ab933-3c82-424c-9d8f-0a9fe67e7610"). InnerVolumeSpecName "kube-api-access-g8njj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.364248 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1e5ab933-3c82-424c-9d8f-0a9fe67e7610" (UID: "1e5ab933-3c82-424c-9d8f-0a9fe67e7610"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.447344 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.447401 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8njj\" (UniqueName: \"kubernetes.io/projected/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-kube-api-access-g8njj\") on node \"crc\" DevicePath \"\"" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.447415 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e5ab933-3c82-424c-9d8f-0a9fe67e7610-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.615576 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6jl9" event={"ID":"1e5ab933-3c82-424c-9d8f-0a9fe67e7610","Type":"ContainerDied","Data":"630721f71bbd65a012e38304a201cbf6457021bfde4aacdcc6ea3097cd98f962"} Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.615634 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l6jl9" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.615645 4711 scope.go:117] "RemoveContainer" containerID="3481b4660bb6f0a40b4b50ff9412b79c3a94971810af7a3bec028b37465cfa7c" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.644746 4711 scope.go:117] "RemoveContainer" containerID="19a17ffedf46f4a6db1f5dce135ca197624db948838845733c35241466a007f0" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.652531 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6jl9"] Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.664833 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6jl9"] Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.678061 4711 scope.go:117] "RemoveContainer" containerID="3d09fd5ea1751146e9584853c81dc3fb599445c6356d2731c6e1084c190c5251" Dec 05 13:06:34 crc kubenswrapper[4711]: I1205 13:06:34.697557 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" path="/var/lib/kubelet/pods/1e5ab933-3c82-424c-9d8f-0a9fe67e7610/volumes" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.277371 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xtrht"] Dec 05 13:07:30 crc kubenswrapper[4711]: E1205 13:07:30.278487 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerName="registry-server" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.278504 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerName="registry-server" Dec 05 13:07:30 crc kubenswrapper[4711]: E1205 13:07:30.278548 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerName="extract-content" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.278556 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerName="extract-content" Dec 05 13:07:30 crc kubenswrapper[4711]: E1205 13:07:30.278578 4711 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerName="extract-utilities" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.278586 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerName="extract-utilities" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.278804 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e5ab933-3c82-424c-9d8f-0a9fe67e7610" containerName="registry-server" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.280354 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.289706 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xtrht"] Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.313578 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp8ws\" (UniqueName: \"kubernetes.io/projected/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-kube-api-access-tp8ws\") pod \"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.313670 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-catalog-content\") pod \"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.313830 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-utilities\") pod \"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.415234 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp8ws\" (UniqueName: \"kubernetes.io/projected/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-kube-api-access-tp8ws\") pod \"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.415329 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-catalog-content\") pod \"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.415359 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-utilities\") pod \"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.415959 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-catalog-content\") pod 
\"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.416017 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-utilities\") pod \"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.448017 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp8ws\" (UniqueName: \"kubernetes.io/projected/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-kube-api-access-tp8ws\") pod \"redhat-operators-xtrht\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") " pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:30 crc kubenswrapper[4711]: I1205 13:07:30.605890 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:31 crc kubenswrapper[4711]: I1205 13:07:31.105525 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xtrht"] Dec 05 13:07:31 crc kubenswrapper[4711]: W1205 13:07:31.105525 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ee0ca5e_59c1_4485_b9c6_1e926cc2831f.slice/crio-bae161a7d4aef7ba201998aa8d4b350acd5356f1e8b124a130e42f82f48b3a99 WatchSource:0}: Error finding container bae161a7d4aef7ba201998aa8d4b350acd5356f1e8b124a130e42f82f48b3a99: Status 404 returned error can't find the container with id bae161a7d4aef7ba201998aa8d4b350acd5356f1e8b124a130e42f82f48b3a99 Dec 05 13:07:31 crc kubenswrapper[4711]: I1205 13:07:31.177614 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtrht" event={"ID":"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f","Type":"ContainerStarted","Data":"bae161a7d4aef7ba201998aa8d4b350acd5356f1e8b124a130e42f82f48b3a99"} Dec 05 13:07:32 crc kubenswrapper[4711]: I1205 13:07:32.188429 4711 generic.go:334] "Generic (PLEG): container finished" podID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerID="aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971" exitCode=0 Dec 05 13:07:32 crc kubenswrapper[4711]: I1205 13:07:32.188490 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtrht" event={"ID":"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f","Type":"ContainerDied","Data":"aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971"} Dec 05 13:07:33 crc kubenswrapper[4711]: I1205 13:07:33.199426 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtrht" event={"ID":"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f","Type":"ContainerStarted","Data":"9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df"} Dec 05 13:07:36 crc kubenswrapper[4711]: I1205 13:07:36.228957 4711 generic.go:334] "Generic (PLEG): container finished" podID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerID="9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df" exitCode=0 Dec 05 13:07:36 crc kubenswrapper[4711]: I1205 13:07:36.229031 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtrht" 
event={"ID":"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f","Type":"ContainerDied","Data":"9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df"} Dec 05 13:07:37 crc kubenswrapper[4711]: I1205 13:07:37.240498 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtrht" event={"ID":"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f","Type":"ContainerStarted","Data":"a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33"} Dec 05 13:07:37 crc kubenswrapper[4711]: I1205 13:07:37.264208 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xtrht" podStartSLOduration=2.833169015 podStartE2EDuration="7.264187161s" podCreationTimestamp="2025-12-05 13:07:30 +0000 UTC" firstStartedPulling="2025-12-05 13:07:32.190913658 +0000 UTC m=+3497.775235988" lastFinishedPulling="2025-12-05 13:07:36.621931804 +0000 UTC m=+3502.206254134" observedRunningTime="2025-12-05 13:07:37.261260839 +0000 UTC m=+3502.845583199" watchObservedRunningTime="2025-12-05 13:07:37.264187161 +0000 UTC m=+3502.848509491" Dec 05 13:07:40 crc kubenswrapper[4711]: I1205 13:07:40.606575 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:40 crc kubenswrapper[4711]: I1205 13:07:40.607155 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:41 crc kubenswrapper[4711]: I1205 13:07:41.655819 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xtrht" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="registry-server" probeResult="failure" output=< Dec 05 13:07:41 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 13:07:41 crc kubenswrapper[4711]: > Dec 05 13:07:50 crc kubenswrapper[4711]: I1205 13:07:50.699118 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:50 crc kubenswrapper[4711]: I1205 13:07:50.847284 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xtrht" Dec 05 13:07:50 crc kubenswrapper[4711]: I1205 13:07:50.958986 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xtrht"] Dec 05 13:07:52 crc kubenswrapper[4711]: I1205 13:07:52.375941 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xtrht" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="registry-server" containerID="cri-o://a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33" gracePeriod=2 Dec 05 13:07:52 crc kubenswrapper[4711]: I1205 13:07:52.882266 4711 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.073726 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-catalog-content\") pod \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") "
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.073765 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-utilities\") pod \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") "
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.073824 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp8ws\" (UniqueName: \"kubernetes.io/projected/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-kube-api-access-tp8ws\") pod \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\" (UID: \"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f\") "
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.074659 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-utilities" (OuterVolumeSpecName: "utilities") pod "9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" (UID: "9ee0ca5e-59c1-4485-b9c6-1e926cc2831f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.088003 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-kube-api-access-tp8ws" (OuterVolumeSpecName: "kube-api-access-tp8ws") pod "9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" (UID: "9ee0ca5e-59c1-4485-b9c6-1e926cc2831f"). InnerVolumeSpecName "kube-api-access-tp8ws". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.176799 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.176856 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp8ws\" (UniqueName: \"kubernetes.io/projected/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-kube-api-access-tp8ws\") on node \"crc\" DevicePath \"\""
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.189154 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" (UID: "9ee0ca5e-59c1-4485-b9c6-1e926cc2831f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.277663 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.386878 4711 generic.go:334] "Generic (PLEG): container finished" podID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerID="a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33" exitCode=0
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.386934 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtrht" event={"ID":"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f","Type":"ContainerDied","Data":"a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33"}
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.386952 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtrht"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.386966 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtrht" event={"ID":"9ee0ca5e-59c1-4485-b9c6-1e926cc2831f","Type":"ContainerDied","Data":"bae161a7d4aef7ba201998aa8d4b350acd5356f1e8b124a130e42f82f48b3a99"}
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.386988 4711 scope.go:117] "RemoveContainer" containerID="a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.417047 4711 scope.go:117] "RemoveContainer" containerID="9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.423738 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xtrht"]
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.437283 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xtrht"]
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.437926 4711 scope.go:117] "RemoveContainer" containerID="aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.483766 4711 scope.go:117] "RemoveContainer" containerID="a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33"
Dec 05 13:07:53 crc kubenswrapper[4711]: E1205 13:07:53.484181 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33\": container with ID starting with a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33 not found: ID does not exist" containerID="a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.484223 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33"} err="failed to get container status \"a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33\": rpc error: code = NotFound desc = could not find container \"a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33\": container with ID starting with a3c6dec376fb14c38cff75b87d2b5b9a143820c25eb56eaf9abda85c5d672a33 not found: ID does not exist"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.484251 4711 scope.go:117] "RemoveContainer" containerID="9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df"
Dec 05 13:07:53 crc kubenswrapper[4711]: E1205 13:07:53.484564 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df\": container with ID starting with 9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df not found: ID does not exist" containerID="9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.484599 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df"} err="failed to get container status \"9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df\": rpc error: code = NotFound desc = could not find container \"9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df\": container with ID starting with 9e51cb44b85bf695cb3995d2129ba76c3aa524a249d863cadb77ac30a9ca86df not found: ID does not exist"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.484630 4711 scope.go:117] "RemoveContainer" containerID="aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971"
Dec 05 13:07:53 crc kubenswrapper[4711]: E1205 13:07:53.484974 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971\": container with ID starting with aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971 not found: ID does not exist" containerID="aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971"
Dec 05 13:07:53 crc kubenswrapper[4711]: I1205 13:07:53.485028 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971"} err="failed to get container status \"aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971\": rpc error: code = NotFound desc = could not find container \"aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971\": container with ID starting with aac4c310258b35e4693279b3140561e48ba3e775c85432894d266426c93a3971 not found: ID does not exist"
Dec 05 13:07:54 crc kubenswrapper[4711]: I1205 13:07:54.697872 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" path="/var/lib/kubelet/pods/9ee0ca5e-59c1-4485-b9c6-1e926cc2831f/volumes"
Dec 05 13:08:18 crc kubenswrapper[4711]: I1205 13:08:18.301101 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:08:18 crc kubenswrapper[4711]: I1205 13:08:18.301915 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:08:48 crc kubenswrapper[4711]: I1205 13:08:48.301355 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
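[Note on the "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs above: they appear benign here. The kubelet retries deletion of container IDs that CRI-O has already removed, the runtime answers with gRPC code NotFound, and cleanup still completes ("Cleaned up orphaned pod volumes dir" follows). The usual pattern is to treat NotFound as "already gone"; a sketch, with removeFn standing in for a wrapped CRI RemoveContainer call (a hypothetical helper, not kubelet's actual code):]

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeIgnoringNotFound treats gRPC NotFound from the runtime as
// success: the container is gone either way, which is why the
// repeated "could not find container" errors in the log do not
// block pod cleanup.
func removeIgnoringNotFound(removeFn func(id string) error, id string) error {
	err := removeFn(id)
	if err == nil || status.Code(err) == codes.NotFound {
		return nil
	}
	return fmt.Errorf("remove container %s: %w", id, err)
}

func main() {
	// Simulated runtime that no longer knows the container ID.
	gone := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeIgnoringNotFound(gone, "a3c6dec376fb")) // <nil>
}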
Dec 05 13:08:48 crc kubenswrapper[4711]: I1205 13:08:48.301896 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.606887 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vrpjk"]
Dec 05 13:08:59 crc kubenswrapper[4711]: E1205 13:08:59.609858 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="extract-content"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.609884 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="extract-content"
Dec 05 13:08:59 crc kubenswrapper[4711]: E1205 13:08:59.609915 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="registry-server"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.609922 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="registry-server"
Dec 05 13:08:59 crc kubenswrapper[4711]: E1205 13:08:59.609940 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="extract-utilities"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.609946 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="extract-utilities"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.612792 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ee0ca5e-59c1-4485-b9c6-1e926cc2831f" containerName="registry-server"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.619095 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.639544 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vrpjk"]
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.683668 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wmlc\" (UniqueName: \"kubernetes.io/projected/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-kube-api-access-6wmlc\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.683745 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-utilities\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.683763 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-catalog-content\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.785466 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-utilities\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.785519 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-catalog-content\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.785764 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wmlc\" (UniqueName: \"kubernetes.io/projected/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-kube-api-access-6wmlc\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.786343 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-utilities\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.786447 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-catalog-content\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.805072 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wmlc\" (UniqueName: \"kubernetes.io/projected/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-kube-api-access-6wmlc\") pod \"certified-operators-vrpjk\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") " pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:08:59 crc kubenswrapper[4711]: I1205 13:08:59.957622 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:09:00 crc kubenswrapper[4711]: I1205 13:09:00.576972 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vrpjk"]
Dec 05 13:09:01 crc kubenswrapper[4711]: I1205 13:09:01.093197 4711 generic.go:334] "Generic (PLEG): container finished" podID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerID="775d979076beecfedd22a0a9365970f46d65db38c799e3be4fba27ac4679819a" exitCode=0
Dec 05 13:09:01 crc kubenswrapper[4711]: I1205 13:09:01.093302 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vrpjk" event={"ID":"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b","Type":"ContainerDied","Data":"775d979076beecfedd22a0a9365970f46d65db38c799e3be4fba27ac4679819a"}
Dec 05 13:09:01 crc kubenswrapper[4711]: I1205 13:09:01.093521 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vrpjk" event={"ID":"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b","Type":"ContainerStarted","Data":"cdb6701fc366dba7b2e031f7182941c4a85575bf96dcecb4dfb46c8ef47bfff1"}
Dec 05 13:09:03 crc kubenswrapper[4711]: I1205 13:09:03.116369 4711 generic.go:334] "Generic (PLEG): container finished" podID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerID="6442a77f85d52e240da9a795d094c0db27ef2511a4d1154a29873477630f43ec" exitCode=0
Dec 05 13:09:03 crc kubenswrapper[4711]: I1205 13:09:03.116432 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vrpjk" event={"ID":"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b","Type":"ContainerDied","Data":"6442a77f85d52e240da9a795d094c0db27ef2511a4d1154a29873477630f43ec"}
Dec 05 13:09:04 crc kubenswrapper[4711]: I1205 13:09:04.143743 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vrpjk" event={"ID":"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b","Type":"ContainerStarted","Data":"099c96212812a06834d93c1b259f0b156b0fc589ff5fd992a49a3cf46065966c"}
Dec 05 13:09:09 crc kubenswrapper[4711]: I1205 13:09:09.958588 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:09:09 crc kubenswrapper[4711]: I1205 13:09:09.959005 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:09:10 crc kubenswrapper[4711]: I1205 13:09:10.011729 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:09:10 crc kubenswrapper[4711]: I1205 13:09:10.044603 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vrpjk" podStartSLOduration=8.537484134 podStartE2EDuration="11.044587029s" podCreationTimestamp="2025-12-05 13:08:59 +0000 UTC" firstStartedPulling="2025-12-05 13:09:01.094966495 +0000 UTC m=+3586.679288825" lastFinishedPulling="2025-12-05 13:09:03.60206939 +0000 UTC m=+3589.186391720" observedRunningTime="2025-12-05 13:09:04.166047204 +0000 UTC m=+3589.750369534" watchObservedRunningTime="2025-12-05 13:09:10.044587029 +0000 UTC m=+3595.628909359"
Dec 05 13:09:10 crc kubenswrapper[4711]: I1205 13:09:10.248613 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:09:14 crc kubenswrapper[4711]: I1205 13:09:14.961331 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vrpjk"]
Dec 05 13:09:14 crc kubenswrapper[4711]: I1205 13:09:14.963185 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vrpjk" podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerName="registry-server" containerID="cri-o://099c96212812a06834d93c1b259f0b156b0fc589ff5fd992a49a3cf46065966c" gracePeriod=2
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.259526 4711 generic.go:334] "Generic (PLEG): container finished" podID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerID="099c96212812a06834d93c1b259f0b156b0fc589ff5fd992a49a3cf46065966c" exitCode=0
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.259871 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vrpjk" event={"ID":"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b","Type":"ContainerDied","Data":"099c96212812a06834d93c1b259f0b156b0fc589ff5fd992a49a3cf46065966c"}
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.473057 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.609118 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-utilities\") pod \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") "
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.609348 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-catalog-content\") pod \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") "
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.609508 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wmlc\" (UniqueName: \"kubernetes.io/projected/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-kube-api-access-6wmlc\") pod \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\" (UID: \"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b\") "
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.610059 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-utilities" (OuterVolumeSpecName: "utilities") pod "725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" (UID: "725792c4-3fd2-40d2-b72b-6cb2e80bbd2b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.610788 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.616248 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-kube-api-access-6wmlc" (OuterVolumeSpecName: "kube-api-access-6wmlc") pod "725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" (UID: "725792c4-3fd2-40d2-b72b-6cb2e80bbd2b"). InnerVolumeSpecName "kube-api-access-6wmlc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.673370 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" (UID: "725792c4-3fd2-40d2-b72b-6cb2e80bbd2b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.712951 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:09:15 crc kubenswrapper[4711]: I1205 13:09:15.712989 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wmlc\" (UniqueName: \"kubernetes.io/projected/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b-kube-api-access-6wmlc\") on node \"crc\" DevicePath \"\""
Dec 05 13:09:16 crc kubenswrapper[4711]: I1205 13:09:16.271714 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vrpjk" event={"ID":"725792c4-3fd2-40d2-b72b-6cb2e80bbd2b","Type":"ContainerDied","Data":"cdb6701fc366dba7b2e031f7182941c4a85575bf96dcecb4dfb46c8ef47bfff1"}
Dec 05 13:09:16 crc kubenswrapper[4711]: I1205 13:09:16.272097 4711 scope.go:117] "RemoveContainer" containerID="099c96212812a06834d93c1b259f0b156b0fc589ff5fd992a49a3cf46065966c"
Dec 05 13:09:16 crc kubenswrapper[4711]: I1205 13:09:16.272020 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vrpjk"
Dec 05 13:09:16 crc kubenswrapper[4711]: I1205 13:09:16.297801 4711 scope.go:117] "RemoveContainer" containerID="6442a77f85d52e240da9a795d094c0db27ef2511a4d1154a29873477630f43ec"
Dec 05 13:09:16 crc kubenswrapper[4711]: I1205 13:09:16.313629 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vrpjk"]
Dec 05 13:09:16 crc kubenswrapper[4711]: I1205 13:09:16.318555 4711 scope.go:117] "RemoveContainer" containerID="775d979076beecfedd22a0a9365970f46d65db38c799e3be4fba27ac4679819a"
Dec 05 13:09:16 crc kubenswrapper[4711]: I1205 13:09:16.322868 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vrpjk"]
Dec 05 13:09:16 crc kubenswrapper[4711]: I1205 13:09:16.694963 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" path="/var/lib/kubelet/pods/725792c4-3fd2-40d2-b72b-6cb2e80bbd2b/volumes"
Dec 05 13:09:18 crc kubenswrapper[4711]: I1205 13:09:18.301220 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:09:18 crc kubenswrapper[4711]: I1205 13:09:18.301641 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:09:18 crc kubenswrapper[4711]: I1205 13:09:18.301696 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt"
Dec 05 13:09:18 crc kubenswrapper[4711]: I1205 13:09:18.302705 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 13:09:18 crc kubenswrapper[4711]: I1205 13:09:18.302789 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" gracePeriod=600
Dec 05 13:09:20 crc kubenswrapper[4711]: E1205 13:09:20.249077 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:09:20 crc kubenswrapper[4711]: I1205 13:09:20.328836 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" exitCode=0
Dec 05 13:09:20 crc kubenswrapper[4711]: I1205 13:09:20.328889 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9"}
Dec 05 13:09:20 crc kubenswrapper[4711]: I1205 13:09:20.328936 4711 scope.go:117] "RemoveContainer" containerID="d5ce77d61c9b7e1965189bc5c054fe0c804f2c2909089dad9e2d749d543bd1ad"
Dec 05 13:09:20 crc kubenswrapper[4711]: I1205 13:09:20.329673 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9"
Dec 05 13:09:20 crc kubenswrapper[4711]: E1205 13:09:20.329972 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:09:31 crc kubenswrapper[4711]: I1205 13:09:31.683697 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9"
Dec 05 13:09:31 crc kubenswrapper[4711]: E1205 13:09:31.684611 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:09:46 crc kubenswrapper[4711]: I1205 13:09:46.684002 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9"
Dec 05 13:09:46 crc kubenswrapper[4711]: E1205 13:09:46.684971 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:09:59 crc kubenswrapper[4711]: I1205 13:09:59.683126 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9"
Dec 05 13:09:59 crc kubenswrapper[4711]: E1205 13:09:59.684786 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:10:13 crc kubenswrapper[4711]: I1205 13:10:13.684500 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9"
Dec 05 13:10:13 crc kubenswrapper[4711]: E1205 13:10:13.685546 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
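[Note on the repeating "back-off 5m0s" entries above: the message shows CrashLoopBackOff at its ceiling. Kubelet delays container restarts with an exponential backoff that, per its documented behavior, starts at 10s, doubles per crash, caps at 5 minutes, and resets after roughly 10 minutes of successful running; the base and reset values here are stated from kubelet defaults, not from this log. The RemoveContainer lines repeating every 10-15s are sync-loop retries that keep hitting the same backoff, not actual restarts. A sketch of the doubling-with-cap schedule:]

package main

import (
	"fmt"
	"time"
)

// crashLoopDelays returns the restart-delay schedule implied by the
// "back-off 5m0s" messages: exponential doubling from an assumed 10s
// base, capped at the 5m ceiling that appears in the log.
func crashLoopDelays(restarts int) []time.Duration {
	const (
		base     = 10 * time.Second // assumed kubelet base delay
		maxDelay = 5 * time.Minute  // the "back-off 5m0s" ceiling in the log
	)
	delays := make([]time.Duration, 0, restarts)
	d := base
	for i := 0; i < restarts; i++ {
		delays = append(delays, d)
		if d < maxDelay {
			d *= 2
			if d > maxDelay {
				d = maxDelay
			}
		}
	}
	return delays
}

func main() {
	fmt.Println(crashLoopDelays(7)) // [10s 20s 40s 1m20s 2m40s 5m0s 5m0s]
}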
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:10:25 crc kubenswrapper[4711]: I1205 13:10:25.683319 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:10:25 crc kubenswrapper[4711]: E1205 13:10:25.684083 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:10:38 crc kubenswrapper[4711]: I1205 13:10:38.689088 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:10:38 crc kubenswrapper[4711]: E1205 13:10:38.689766 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:10:52 crc kubenswrapper[4711]: I1205 13:10:52.689453 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:10:52 crc kubenswrapper[4711]: E1205 13:10:52.690523 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:11:06 crc kubenswrapper[4711]: I1205 13:11:06.684792 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:11:06 crc kubenswrapper[4711]: E1205 13:11:06.686123 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.164489 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2bmjj"] Dec 05 13:11:12 crc kubenswrapper[4711]: E1205 13:11:12.166491 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerName="extract-content" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.166537 4711 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerName="extract-content" Dec 05 13:11:12 crc kubenswrapper[4711]: E1205 13:11:12.166586 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerName="registry-server" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.166595 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerName="registry-server" Dec 05 13:11:12 crc kubenswrapper[4711]: E1205 13:11:12.166605 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerName="extract-utilities" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.166614 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerName="extract-utilities" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.166880 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="725792c4-3fd2-40d2-b72b-6cb2e80bbd2b" containerName="registry-server" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.169214 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.178205 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2bmjj"] Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.327394 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-catalog-content\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.327537 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-utilities\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.327667 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btd96\" (UniqueName: \"kubernetes.io/projected/57f3b2ce-a51b-4074-8646-b13d71f400a6-kube-api-access-btd96\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.429275 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-catalog-content\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.429455 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-utilities\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.429955 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-catalog-content\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.429980 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-utilities\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.430137 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btd96\" (UniqueName: \"kubernetes.io/projected/57f3b2ce-a51b-4074-8646-b13d71f400a6-kube-api-access-btd96\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.455421 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btd96\" (UniqueName: \"kubernetes.io/projected/57f3b2ce-a51b-4074-8646-b13d71f400a6-kube-api-access-btd96\") pod \"community-operators-2bmjj\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:12 crc kubenswrapper[4711]: I1205 13:11:12.512483 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:13 crc kubenswrapper[4711]: I1205 13:11:13.145184 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2bmjj"] Dec 05 13:11:13 crc kubenswrapper[4711]: I1205 13:11:13.734323 4711 generic.go:334] "Generic (PLEG): container finished" podID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerID="a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe" exitCode=0 Dec 05 13:11:13 crc kubenswrapper[4711]: I1205 13:11:13.734473 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmjj" event={"ID":"57f3b2ce-a51b-4074-8646-b13d71f400a6","Type":"ContainerDied","Data":"a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe"} Dec 05 13:11:13 crc kubenswrapper[4711]: I1205 13:11:13.734702 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmjj" event={"ID":"57f3b2ce-a51b-4074-8646-b13d71f400a6","Type":"ContainerStarted","Data":"9043bf13dbf5779729de45e3c16d82042c539f3957d632d1ed0a9428175e28b7"} Dec 05 13:11:14 crc kubenswrapper[4711]: I1205 13:11:14.755997 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmjj" event={"ID":"57f3b2ce-a51b-4074-8646-b13d71f400a6","Type":"ContainerStarted","Data":"b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd"} Dec 05 13:11:15 crc kubenswrapper[4711]: I1205 13:11:15.767546 4711 generic.go:334] "Generic (PLEG): container finished" podID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerID="b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd" exitCode=0 Dec 05 13:11:15 crc kubenswrapper[4711]: I1205 13:11:15.767598 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmjj" 
event={"ID":"57f3b2ce-a51b-4074-8646-b13d71f400a6","Type":"ContainerDied","Data":"b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd"} Dec 05 13:11:16 crc kubenswrapper[4711]: I1205 13:11:16.782347 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmjj" event={"ID":"57f3b2ce-a51b-4074-8646-b13d71f400a6","Type":"ContainerStarted","Data":"7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d"} Dec 05 13:11:16 crc kubenswrapper[4711]: I1205 13:11:16.818235 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2bmjj" podStartSLOduration=2.372244345 podStartE2EDuration="4.818199009s" podCreationTimestamp="2025-12-05 13:11:12 +0000 UTC" firstStartedPulling="2025-12-05 13:11:13.735909374 +0000 UTC m=+3719.320231704" lastFinishedPulling="2025-12-05 13:11:16.181864028 +0000 UTC m=+3721.766186368" observedRunningTime="2025-12-05 13:11:16.803477058 +0000 UTC m=+3722.387799408" watchObservedRunningTime="2025-12-05 13:11:16.818199009 +0000 UTC m=+3722.402521369" Dec 05 13:11:17 crc kubenswrapper[4711]: I1205 13:11:17.684120 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:11:17 crc kubenswrapper[4711]: E1205 13:11:17.684464 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:11:22 crc kubenswrapper[4711]: I1205 13:11:22.512882 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:22 crc kubenswrapper[4711]: I1205 13:11:22.513571 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:22 crc kubenswrapper[4711]: I1205 13:11:22.562097 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:22 crc kubenswrapper[4711]: I1205 13:11:22.902429 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:22 crc kubenswrapper[4711]: I1205 13:11:22.961437 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2bmjj"] Dec 05 13:11:24 crc kubenswrapper[4711]: I1205 13:11:24.876360 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2bmjj" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerName="registry-server" containerID="cri-o://7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d" gracePeriod=2 Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.404634 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.464850 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-catalog-content\") pod \"57f3b2ce-a51b-4074-8646-b13d71f400a6\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.465124 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-utilities\") pod \"57f3b2ce-a51b-4074-8646-b13d71f400a6\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.466433 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-utilities" (OuterVolumeSpecName: "utilities") pod "57f3b2ce-a51b-4074-8646-b13d71f400a6" (UID: "57f3b2ce-a51b-4074-8646-b13d71f400a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.466854 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btd96\" (UniqueName: \"kubernetes.io/projected/57f3b2ce-a51b-4074-8646-b13d71f400a6-kube-api-access-btd96\") pod \"57f3b2ce-a51b-4074-8646-b13d71f400a6\" (UID: \"57f3b2ce-a51b-4074-8646-b13d71f400a6\") " Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.468075 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.476738 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57f3b2ce-a51b-4074-8646-b13d71f400a6-kube-api-access-btd96" (OuterVolumeSpecName: "kube-api-access-btd96") pod "57f3b2ce-a51b-4074-8646-b13d71f400a6" (UID: "57f3b2ce-a51b-4074-8646-b13d71f400a6"). InnerVolumeSpecName "kube-api-access-btd96". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.530409 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57f3b2ce-a51b-4074-8646-b13d71f400a6" (UID: "57f3b2ce-a51b-4074-8646-b13d71f400a6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.569857 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f3b2ce-a51b-4074-8646-b13d71f400a6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.569895 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btd96\" (UniqueName: \"kubernetes.io/projected/57f3b2ce-a51b-4074-8646-b13d71f400a6-kube-api-access-btd96\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.889032 4711 generic.go:334] "Generic (PLEG): container finished" podID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerID="7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d" exitCode=0 Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.889082 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmjj" event={"ID":"57f3b2ce-a51b-4074-8646-b13d71f400a6","Type":"ContainerDied","Data":"7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d"} Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.889114 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmjj" event={"ID":"57f3b2ce-a51b-4074-8646-b13d71f400a6","Type":"ContainerDied","Data":"9043bf13dbf5779729de45e3c16d82042c539f3957d632d1ed0a9428175e28b7"} Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.889136 4711 scope.go:117] "RemoveContainer" containerID="7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.890601 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2bmjj" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.914927 4711 scope.go:117] "RemoveContainer" containerID="b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd" Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.951982 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2bmjj"] Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.963712 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2bmjj"] Dec 05 13:11:25 crc kubenswrapper[4711]: I1205 13:11:25.972288 4711 scope.go:117] "RemoveContainer" containerID="a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe" Dec 05 13:11:26 crc kubenswrapper[4711]: I1205 13:11:26.007376 4711 scope.go:117] "RemoveContainer" containerID="7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d" Dec 05 13:11:26 crc kubenswrapper[4711]: E1205 13:11:26.008383 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d\": container with ID starting with 7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d not found: ID does not exist" containerID="7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d" Dec 05 13:11:26 crc kubenswrapper[4711]: I1205 13:11:26.008469 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d"} err="failed to get container status \"7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d\": rpc error: code = NotFound desc = could not find container \"7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d\": container with ID starting with 7322af325ca9af2f47fa75d91f565068c08fbf19b529b70bc99345c9d7dd510d not found: ID does not exist" Dec 05 13:11:26 crc kubenswrapper[4711]: I1205 13:11:26.008517 4711 scope.go:117] "RemoveContainer" containerID="b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd" Dec 05 13:11:26 crc kubenswrapper[4711]: E1205 13:11:26.009024 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd\": container with ID starting with b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd not found: ID does not exist" containerID="b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd" Dec 05 13:11:26 crc kubenswrapper[4711]: I1205 13:11:26.009064 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd"} err="failed to get container status \"b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd\": rpc error: code = NotFound desc = could not find container \"b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd\": container with ID starting with b4763d4ec481a13794c1cb69bdcef64524d8fbcdd8af3b064ee360ee2e7205bd not found: ID does not exist" Dec 05 13:11:26 crc kubenswrapper[4711]: I1205 13:11:26.009090 4711 scope.go:117] "RemoveContainer" containerID="a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe" Dec 05 13:11:26 crc kubenswrapper[4711]: E1205 13:11:26.009556 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe\": container with ID starting with a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe not found: ID does not exist" containerID="a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe" Dec 05 13:11:26 crc kubenswrapper[4711]: I1205 13:11:26.009597 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe"} err="failed to get container status \"a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe\": rpc error: code = NotFound desc = could not find container \"a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe\": container with ID starting with a72a31bebb68882310326dc06f714eb88649dd4fdc867e32ad3c6cc87c076bbe not found: ID does not exist" Dec 05 13:11:26 crc kubenswrapper[4711]: I1205 13:11:26.696466 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" path="/var/lib/kubelet/pods/57f3b2ce-a51b-4074-8646-b13d71f400a6/volumes" Dec 05 13:11:28 crc kubenswrapper[4711]: I1205 13:11:28.694461 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:11:28 crc kubenswrapper[4711]: E1205 13:11:28.695137 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:11:40 crc kubenswrapper[4711]: I1205 13:11:40.683412 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:11:40 crc kubenswrapper[4711]: E1205 13:11:40.684180 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:11:52 crc kubenswrapper[4711]: I1205 13:11:52.684126 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:11:52 crc kubenswrapper[4711]: E1205 13:11:52.686435 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:12:04 crc kubenswrapper[4711]: I1205 13:12:04.682851 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:12:04 crc kubenswrapper[4711]: E1205 13:12:04.683704 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:12:17 crc kubenswrapper[4711]: I1205 13:12:17.683540 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:12:17 crc kubenswrapper[4711]: E1205 13:12:17.684493 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:12:30 crc kubenswrapper[4711]: I1205 13:12:30.683767 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:12:30 crc kubenswrapper[4711]: E1205 13:12:30.684608 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:12:45 crc kubenswrapper[4711]: I1205 13:12:45.683973 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:12:45 crc kubenswrapper[4711]: E1205 13:12:45.684699 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:12:58 crc kubenswrapper[4711]: I1205 13:12:58.698243 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:12:58 crc kubenswrapper[4711]: E1205 13:12:58.699266 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:13:11 crc kubenswrapper[4711]: I1205 13:13:11.683865 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:13:11 crc kubenswrapper[4711]: E1205 13:13:11.684767 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:13:26 crc kubenswrapper[4711]: I1205 13:13:26.701969 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:13:26 crc kubenswrapper[4711]: E1205 13:13:26.703047 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:13:39 crc kubenswrapper[4711]: I1205 13:13:39.683593 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:13:39 crc kubenswrapper[4711]: E1205 13:13:39.684249 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:13:53 crc kubenswrapper[4711]: I1205 13:13:53.683325 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:13:53 crc kubenswrapper[4711]: E1205 13:13:53.684290 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:14:05 crc kubenswrapper[4711]: I1205 13:14:05.683759 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:14:05 crc kubenswrapper[4711]: E1205 13:14:05.684758 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:14:17 crc kubenswrapper[4711]: I1205 13:14:17.682985 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:14:17 crc kubenswrapper[4711]: E1205 13:14:17.683827 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" 
podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:14:28 crc kubenswrapper[4711]: I1205 13:14:28.690034 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:14:29 crc kubenswrapper[4711]: I1205 13:14:29.764788 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"f6a10c46f5b379deeaa4c83d83fef060bb196014a3f91f336eb789e7cdbff215"} Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.191221 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8"] Dec 05 13:15:00 crc kubenswrapper[4711]: E1205 13:15:00.192275 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerName="extract-utilities" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.192293 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerName="extract-utilities" Dec 05 13:15:00 crc kubenswrapper[4711]: E1205 13:15:00.192321 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerName="registry-server" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.192329 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerName="registry-server" Dec 05 13:15:00 crc kubenswrapper[4711]: E1205 13:15:00.192355 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerName="extract-content" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.192363 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerName="extract-content" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.192663 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="57f3b2ce-a51b-4074-8646-b13d71f400a6" containerName="registry-server" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.193556 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.198215 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.198415 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.214167 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8"] Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.234205 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-secret-volume\") pod \"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.234641 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldmvx\" (UniqueName: \"kubernetes.io/projected/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-kube-api-access-ldmvx\") pod \"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.234843 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-config-volume\") pod \"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.336716 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-secret-volume\") pod \"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.336805 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldmvx\" (UniqueName: \"kubernetes.io/projected/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-kube-api-access-ldmvx\") pod \"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.336879 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-config-volume\") pod \"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.339055 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-config-volume\") pod 
\"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.347038 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-secret-volume\") pod \"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.355271 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldmvx\" (UniqueName: \"kubernetes.io/projected/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-kube-api-access-ldmvx\") pod \"collect-profiles-29415675-mnpr8\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:00 crc kubenswrapper[4711]: I1205 13:15:00.514210 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:01 crc kubenswrapper[4711]: I1205 13:15:01.017677 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8"] Dec 05 13:15:01 crc kubenswrapper[4711]: I1205 13:15:01.050689 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" event={"ID":"12e2758f-eb96-403e-bbf8-6c7bbc3bc034","Type":"ContainerStarted","Data":"26faa73c4337ea40922abdb537ecebbbb7c2cd7490a38b44cdb5947ae401bf20"} Dec 05 13:15:02 crc kubenswrapper[4711]: I1205 13:15:02.061060 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" event={"ID":"12e2758f-eb96-403e-bbf8-6c7bbc3bc034","Type":"ContainerStarted","Data":"2f36490362488c78986ad15bd4a09d20672bc23dbdaa3099b1ce39d317894436"} Dec 05 13:15:03 crc kubenswrapper[4711]: I1205 13:15:03.072416 4711 generic.go:334] "Generic (PLEG): container finished" podID="12e2758f-eb96-403e-bbf8-6c7bbc3bc034" containerID="2f36490362488c78986ad15bd4a09d20672bc23dbdaa3099b1ce39d317894436" exitCode=0 Dec 05 13:15:03 crc kubenswrapper[4711]: I1205 13:15:03.072481 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" event={"ID":"12e2758f-eb96-403e-bbf8-6c7bbc3bc034","Type":"ContainerDied","Data":"2f36490362488c78986ad15bd4a09d20672bc23dbdaa3099b1ce39d317894436"} Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.485209 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.634889 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-secret-volume\") pod \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.635053 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-config-volume\") pod \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.635112 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldmvx\" (UniqueName: \"kubernetes.io/projected/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-kube-api-access-ldmvx\") pod \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\" (UID: \"12e2758f-eb96-403e-bbf8-6c7bbc3bc034\") " Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.635977 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-config-volume" (OuterVolumeSpecName: "config-volume") pod "12e2758f-eb96-403e-bbf8-6c7bbc3bc034" (UID: "12e2758f-eb96-403e-bbf8-6c7bbc3bc034"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.642136 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-kube-api-access-ldmvx" (OuterVolumeSpecName: "kube-api-access-ldmvx") pod "12e2758f-eb96-403e-bbf8-6c7bbc3bc034" (UID: "12e2758f-eb96-403e-bbf8-6c7bbc3bc034"). InnerVolumeSpecName "kube-api-access-ldmvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.642476 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "12e2758f-eb96-403e-bbf8-6c7bbc3bc034" (UID: "12e2758f-eb96-403e-bbf8-6c7bbc3bc034"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.737562 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.737610 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:15:04 crc kubenswrapper[4711]: I1205 13:15:04.737625 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldmvx\" (UniqueName: \"kubernetes.io/projected/12e2758f-eb96-403e-bbf8-6c7bbc3bc034-kube-api-access-ldmvx\") on node \"crc\" DevicePath \"\"" Dec 05 13:15:05 crc kubenswrapper[4711]: I1205 13:15:05.101474 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" event={"ID":"12e2758f-eb96-403e-bbf8-6c7bbc3bc034","Type":"ContainerDied","Data":"26faa73c4337ea40922abdb537ecebbbb7c2cd7490a38b44cdb5947ae401bf20"} Dec 05 13:15:05 crc kubenswrapper[4711]: I1205 13:15:05.101705 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26faa73c4337ea40922abdb537ecebbbb7c2cd7490a38b44cdb5947ae401bf20" Dec 05 13:15:05 crc kubenswrapper[4711]: I1205 13:15:05.101560 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8" Dec 05 13:15:05 crc kubenswrapper[4711]: I1205 13:15:05.589669 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh"] Dec 05 13:15:05 crc kubenswrapper[4711]: I1205 13:15:05.597972 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-8t5bh"] Dec 05 13:15:06 crc kubenswrapper[4711]: I1205 13:15:06.693885 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e88e06c8-ba12-4a4a-b88d-726fc1c0925a" path="/var/lib/kubelet/pods/e88e06c8-ba12-4a4a-b88d-726fc1c0925a/volumes" Dec 05 13:15:31 crc kubenswrapper[4711]: I1205 13:15:31.340941 4711 scope.go:117] "RemoveContainer" containerID="f1490ed30eb8061924ece8013970050987cb50549740e3dfd27b62ef6df1b09d" Dec 05 13:16:48 crc kubenswrapper[4711]: I1205 13:16:48.300689 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:16:48 crc kubenswrapper[4711]: I1205 13:16:48.301294 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:17:18 crc kubenswrapper[4711]: I1205 13:17:18.301370 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 05 13:17:18 crc kubenswrapper[4711]: I1205 13:17:18.302137 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:17:48 crc kubenswrapper[4711]: I1205 13:17:48.300596 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:17:48 crc kubenswrapper[4711]: I1205 13:17:48.301169 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:17:48 crc kubenswrapper[4711]: I1205 13:17:48.301220 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 13:17:48 crc kubenswrapper[4711]: I1205 13:17:48.302084 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f6a10c46f5b379deeaa4c83d83fef060bb196014a3f91f336eb789e7cdbff215"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:17:48 crc kubenswrapper[4711]: I1205 13:17:48.302130 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://f6a10c46f5b379deeaa4c83d83fef060bb196014a3f91f336eb789e7cdbff215" gracePeriod=600 Dec 05 13:17:49 crc kubenswrapper[4711]: I1205 13:17:49.691984 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="f6a10c46f5b379deeaa4c83d83fef060bb196014a3f91f336eb789e7cdbff215" exitCode=0 Dec 05 13:17:49 crc kubenswrapper[4711]: I1205 13:17:49.692058 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"f6a10c46f5b379deeaa4c83d83fef060bb196014a3f91f336eb789e7cdbff215"} Dec 05 13:17:49 crc kubenswrapper[4711]: I1205 13:17:49.692466 4711 scope.go:117] "RemoveContainer" containerID="c418b4c3fd70fc196a20397ff9c62709f6480dcf4de399de7e2cd0f3583345d9" Dec 05 13:17:51 crc kubenswrapper[4711]: I1205 13:17:51.719899 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb"} Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.474627 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-66cdj"] Dec 05 13:17:56 crc kubenswrapper[4711]: E1205 13:17:56.475732 4711 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="12e2758f-eb96-403e-bbf8-6c7bbc3bc034" containerName="collect-profiles" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.475749 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="12e2758f-eb96-403e-bbf8-6c7bbc3bc034" containerName="collect-profiles" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.476012 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="12e2758f-eb96-403e-bbf8-6c7bbc3bc034" containerName="collect-profiles" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.481763 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.504197 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-66cdj"] Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.560808 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kchnm\" (UniqueName: \"kubernetes.io/projected/28d1103d-19cf-4faa-a3d5-89491eaf589c-kube-api-access-kchnm\") pod \"redhat-operators-66cdj\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.560859 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-utilities\") pod \"redhat-operators-66cdj\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.560895 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-catalog-content\") pod \"redhat-operators-66cdj\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.662788 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-catalog-content\") pod \"redhat-operators-66cdj\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.663006 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kchnm\" (UniqueName: \"kubernetes.io/projected/28d1103d-19cf-4faa-a3d5-89491eaf589c-kube-api-access-kchnm\") pod \"redhat-operators-66cdj\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.663031 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-utilities\") pod \"redhat-operators-66cdj\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.663517 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-utilities\") pod \"redhat-operators-66cdj\" (UID: 
\"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.663791 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-catalog-content\") pod \"redhat-operators-66cdj\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.706523 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kchnm\" (UniqueName: \"kubernetes.io/projected/28d1103d-19cf-4faa-a3d5-89491eaf589c-kube-api-access-kchnm\") pod \"redhat-operators-66cdj\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:56 crc kubenswrapper[4711]: I1205 13:17:56.804600 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:17:57 crc kubenswrapper[4711]: I1205 13:17:57.343108 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-66cdj"] Dec 05 13:17:58 crc kubenswrapper[4711]: I1205 13:17:58.787323 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66cdj" event={"ID":"28d1103d-19cf-4faa-a3d5-89491eaf589c","Type":"ContainerStarted","Data":"e61113b80da8c20795bd40570d5e93dfac5a2cdee7a4fb30be80662d772fe6d1"} Dec 05 13:18:02 crc kubenswrapper[4711]: I1205 13:18:02.827247 4711 generic.go:334] "Generic (PLEG): container finished" podID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerID="933d8d3d109a70e7d3b096eb9c0984f5323f9f5c69b2301e4daf6476bc9d7435" exitCode=0 Dec 05 13:18:02 crc kubenswrapper[4711]: I1205 13:18:02.827292 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66cdj" event={"ID":"28d1103d-19cf-4faa-a3d5-89491eaf589c","Type":"ContainerDied","Data":"933d8d3d109a70e7d3b096eb9c0984f5323f9f5c69b2301e4daf6476bc9d7435"} Dec 05 13:18:02 crc kubenswrapper[4711]: I1205 13:18:02.829768 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:18:05 crc kubenswrapper[4711]: I1205 13:18:05.457425 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="hostpath-provisioner/csi-hostpathplugin-rb2nh" podUID="df146046-34ea-410b-a342-83bc374306d1" containerName="hostpath-provisioner" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 13:18:09 crc kubenswrapper[4711]: I1205 13:18:09.901553 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66cdj" event={"ID":"28d1103d-19cf-4faa-a3d5-89491eaf589c","Type":"ContainerStarted","Data":"31d417bbd44aadfe521ceb70939362330d4bcf616db752b71e6863993cf6065e"} Dec 05 13:18:21 crc kubenswrapper[4711]: I1205 13:18:21.026189 4711 generic.go:334] "Generic (PLEG): container finished" podID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerID="31d417bbd44aadfe521ceb70939362330d4bcf616db752b71e6863993cf6065e" exitCode=0 Dec 05 13:18:21 crc kubenswrapper[4711]: I1205 13:18:21.026273 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66cdj" event={"ID":"28d1103d-19cf-4faa-a3d5-89491eaf589c","Type":"ContainerDied","Data":"31d417bbd44aadfe521ceb70939362330d4bcf616db752b71e6863993cf6065e"} Dec 05 13:18:25 crc kubenswrapper[4711]: 
I1205 13:18:25.098209 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66cdj" event={"ID":"28d1103d-19cf-4faa-a3d5-89491eaf589c","Type":"ContainerStarted","Data":"061145c093582e7b4aeaa0fb2b6b48f6a11511d9577f640fba8cf73ece6b20fd"} Dec 05 13:18:25 crc kubenswrapper[4711]: I1205 13:18:25.123843 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-66cdj" podStartSLOduration=7.842960317 podStartE2EDuration="29.123819284s" podCreationTimestamp="2025-12-05 13:17:56 +0000 UTC" firstStartedPulling="2025-12-05 13:18:02.829557163 +0000 UTC m=+4128.413879493" lastFinishedPulling="2025-12-05 13:18:24.11041613 +0000 UTC m=+4149.694738460" observedRunningTime="2025-12-05 13:18:25.119257472 +0000 UTC m=+4150.703579822" watchObservedRunningTime="2025-12-05 13:18:25.123819284 +0000 UTC m=+4150.708141624" Dec 05 13:18:26 crc kubenswrapper[4711]: I1205 13:18:26.805700 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:18:26 crc kubenswrapper[4711]: I1205 13:18:26.807476 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:18:27 crc kubenswrapper[4711]: I1205 13:18:27.866261 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-66cdj" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="registry-server" probeResult="failure" output=< Dec 05 13:18:27 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 13:18:27 crc kubenswrapper[4711]: > Dec 05 13:18:36 crc kubenswrapper[4711]: I1205 13:18:36.858879 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:18:36 crc kubenswrapper[4711]: I1205 13:18:36.930534 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:18:37 crc kubenswrapper[4711]: I1205 13:18:37.098563 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-66cdj"] Dec 05 13:18:38 crc kubenswrapper[4711]: I1205 13:18:38.223927 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-66cdj" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="registry-server" containerID="cri-o://061145c093582e7b4aeaa0fb2b6b48f6a11511d9577f640fba8cf73ece6b20fd" gracePeriod=2 Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.237582 4711 generic.go:334] "Generic (PLEG): container finished" podID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerID="061145c093582e7b4aeaa0fb2b6b48f6a11511d9577f640fba8cf73ece6b20fd" exitCode=0 Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.237674 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66cdj" event={"ID":"28d1103d-19cf-4faa-a3d5-89491eaf589c","Type":"ContainerDied","Data":"061145c093582e7b4aeaa0fb2b6b48f6a11511d9577f640fba8cf73ece6b20fd"} Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.572082 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.630103 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kchnm\" (UniqueName: \"kubernetes.io/projected/28d1103d-19cf-4faa-a3d5-89491eaf589c-kube-api-access-kchnm\") pod \"28d1103d-19cf-4faa-a3d5-89491eaf589c\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.630396 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-utilities\") pod \"28d1103d-19cf-4faa-a3d5-89491eaf589c\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.630490 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-catalog-content\") pod \"28d1103d-19cf-4faa-a3d5-89491eaf589c\" (UID: \"28d1103d-19cf-4faa-a3d5-89491eaf589c\") " Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.631265 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-utilities" (OuterVolumeSpecName: "utilities") pod "28d1103d-19cf-4faa-a3d5-89491eaf589c" (UID: "28d1103d-19cf-4faa-a3d5-89491eaf589c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.636439 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28d1103d-19cf-4faa-a3d5-89491eaf589c-kube-api-access-kchnm" (OuterVolumeSpecName: "kube-api-access-kchnm") pod "28d1103d-19cf-4faa-a3d5-89491eaf589c" (UID: "28d1103d-19cf-4faa-a3d5-89491eaf589c"). InnerVolumeSpecName "kube-api-access-kchnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.732298 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kchnm\" (UniqueName: \"kubernetes.io/projected/28d1103d-19cf-4faa-a3d5-89491eaf589c-kube-api-access-kchnm\") on node \"crc\" DevicePath \"\"" Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.732325 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.746197 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28d1103d-19cf-4faa-a3d5-89491eaf589c" (UID: "28d1103d-19cf-4faa-a3d5-89491eaf589c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:18:39 crc kubenswrapper[4711]: I1205 13:18:39.834514 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d1103d-19cf-4faa-a3d5-89491eaf589c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:18:40 crc kubenswrapper[4711]: I1205 13:18:40.253101 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66cdj" event={"ID":"28d1103d-19cf-4faa-a3d5-89491eaf589c","Type":"ContainerDied","Data":"e61113b80da8c20795bd40570d5e93dfac5a2cdee7a4fb30be80662d772fe6d1"} Dec 05 13:18:40 crc kubenswrapper[4711]: I1205 13:18:40.253461 4711 scope.go:117] "RemoveContainer" containerID="061145c093582e7b4aeaa0fb2b6b48f6a11511d9577f640fba8cf73ece6b20fd" Dec 05 13:18:40 crc kubenswrapper[4711]: I1205 13:18:40.253187 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-66cdj" Dec 05 13:18:40 crc kubenswrapper[4711]: I1205 13:18:40.293193 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-66cdj"] Dec 05 13:18:40 crc kubenswrapper[4711]: I1205 13:18:40.293868 4711 scope.go:117] "RemoveContainer" containerID="31d417bbd44aadfe521ceb70939362330d4bcf616db752b71e6863993cf6065e" Dec 05 13:18:40 crc kubenswrapper[4711]: I1205 13:18:40.302474 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-66cdj"] Dec 05 13:18:40 crc kubenswrapper[4711]: I1205 13:18:40.344880 4711 scope.go:117] "RemoveContainer" containerID="933d8d3d109a70e7d3b096eb9c0984f5323f9f5c69b2301e4daf6476bc9d7435" Dec 05 13:18:40 crc kubenswrapper[4711]: I1205 13:18:40.693395 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" path="/var/lib/kubelet/pods/28d1103d-19cf-4faa-a3d5-89491eaf589c/volumes" Dec 05 13:19:29 crc kubenswrapper[4711]: I1205 13:19:29.524670 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-scheduler-0" podUID="ed1a3b33-3fb6-412a-8bde-03171358617c" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.184:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 13:20:18 crc kubenswrapper[4711]: I1205 13:20:18.300492 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:20:18 crc kubenswrapper[4711]: I1205 13:20:18.301055 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:20:48 crc kubenswrapper[4711]: I1205 13:20:48.301342 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:20:48 crc kubenswrapper[4711]: I1205 13:20:48.301924 4711 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:21:18 crc kubenswrapper[4711]: I1205 13:21:18.300831 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:21:18 crc kubenswrapper[4711]: I1205 13:21:18.301772 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:21:18 crc kubenswrapper[4711]: I1205 13:21:18.301889 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 13:21:18 crc kubenswrapper[4711]: I1205 13:21:18.303122 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:21:18 crc kubenswrapper[4711]: I1205 13:21:18.303244 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" gracePeriod=600 Dec 05 13:21:18 crc kubenswrapper[4711]: I1205 13:21:18.859987 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" exitCode=0 Dec 05 13:21:18 crc kubenswrapper[4711]: I1205 13:21:18.860097 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb"} Dec 05 13:21:18 crc kubenswrapper[4711]: I1205 13:21:18.860359 4711 scope.go:117] "RemoveContainer" containerID="f6a10c46f5b379deeaa4c83d83fef060bb196014a3f91f336eb789e7cdbff215" Dec 05 13:21:20 crc kubenswrapper[4711]: E1205 13:21:20.640997 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:21:20 crc kubenswrapper[4711]: I1205 13:21:20.899437 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:21:20 crc 
kubenswrapper[4711]: E1205 13:21:20.899889 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:21:33 crc kubenswrapper[4711]: I1205 13:21:33.683055 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:21:33 crc kubenswrapper[4711]: E1205 13:21:33.683893 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:21:46 crc kubenswrapper[4711]: I1205 13:21:46.683198 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:21:46 crc kubenswrapper[4711]: E1205 13:21:46.684060 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:21:58 crc kubenswrapper[4711]: I1205 13:21:58.708112 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:21:58 crc kubenswrapper[4711]: E1205 13:21:58.709291 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:22:13 crc kubenswrapper[4711]: I1205 13:22:13.683825 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:22:13 crc kubenswrapper[4711]: E1205 13:22:13.684848 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:22:27 crc kubenswrapper[4711]: I1205 13:22:27.683818 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:22:27 crc kubenswrapper[4711]: E1205 13:22:27.684659 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.829005 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xtm4r"] Dec 05 13:22:28 crc kubenswrapper[4711]: E1205 13:22:28.829738 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="extract-content" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.829752 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="extract-content" Dec 05 13:22:28 crc kubenswrapper[4711]: E1205 13:22:28.829787 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="registry-server" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.829797 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="registry-server" Dec 05 13:22:28 crc kubenswrapper[4711]: E1205 13:22:28.829833 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="extract-utilities" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.829841 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="extract-utilities" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.830064 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="28d1103d-19cf-4faa-a3d5-89491eaf589c" containerName="registry-server" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.832026 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.841853 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xtm4r"] Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.930920 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-catalog-content\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.931007 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76t7p\" (UniqueName: \"kubernetes.io/projected/d050bc01-8662-4b0a-b2ab-f59eee3794c8-kube-api-access-76t7p\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:28 crc kubenswrapper[4711]: I1205 13:22:28.931134 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-utilities\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:29 crc kubenswrapper[4711]: I1205 13:22:29.033584 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76t7p\" (UniqueName: \"kubernetes.io/projected/d050bc01-8662-4b0a-b2ab-f59eee3794c8-kube-api-access-76t7p\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:29 crc kubenswrapper[4711]: I1205 13:22:29.033661 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-utilities\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:29 crc kubenswrapper[4711]: I1205 13:22:29.033799 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-catalog-content\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:29 crc kubenswrapper[4711]: I1205 13:22:29.034339 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-catalog-content\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:29 crc kubenswrapper[4711]: I1205 13:22:29.034510 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-utilities\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:29 crc kubenswrapper[4711]: I1205 13:22:29.063279 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-76t7p\" (UniqueName: \"kubernetes.io/projected/d050bc01-8662-4b0a-b2ab-f59eee3794c8-kube-api-access-76t7p\") pod \"community-operators-xtm4r\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:29 crc kubenswrapper[4711]: I1205 13:22:29.157281 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:29 crc kubenswrapper[4711]: I1205 13:22:29.753613 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xtm4r"] Dec 05 13:22:30 crc kubenswrapper[4711]: I1205 13:22:30.579831 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xtm4r" event={"ID":"d050bc01-8662-4b0a-b2ab-f59eee3794c8","Type":"ContainerStarted","Data":"eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce"} Dec 05 13:22:30 crc kubenswrapper[4711]: I1205 13:22:30.580215 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xtm4r" event={"ID":"d050bc01-8662-4b0a-b2ab-f59eee3794c8","Type":"ContainerStarted","Data":"b3060dee41bd35f7cdd07630c4018070d7f4eeaf5850e66ca1bca15be1e0d676"} Dec 05 13:22:31 crc kubenswrapper[4711]: I1205 13:22:31.591725 4711 generic.go:334] "Generic (PLEG): container finished" podID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerID="eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce" exitCode=0 Dec 05 13:22:31 crc kubenswrapper[4711]: I1205 13:22:31.591994 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xtm4r" event={"ID":"d050bc01-8662-4b0a-b2ab-f59eee3794c8","Type":"ContainerDied","Data":"eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce"} Dec 05 13:22:38 crc kubenswrapper[4711]: I1205 13:22:38.657110 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xtm4r" event={"ID":"d050bc01-8662-4b0a-b2ab-f59eee3794c8","Type":"ContainerStarted","Data":"94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495"} Dec 05 13:22:39 crc kubenswrapper[4711]: I1205 13:22:39.670984 4711 generic.go:334] "Generic (PLEG): container finished" podID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerID="94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495" exitCode=0 Dec 05 13:22:39 crc kubenswrapper[4711]: I1205 13:22:39.671063 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xtm4r" event={"ID":"d050bc01-8662-4b0a-b2ab-f59eee3794c8","Type":"ContainerDied","Data":"94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495"} Dec 05 13:22:40 crc kubenswrapper[4711]: I1205 13:22:40.684110 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:22:40 crc kubenswrapper[4711]: E1205 13:22:40.684699 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:22:41 crc kubenswrapper[4711]: I1205 13:22:41.699303 4711 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xtm4r" event={"ID":"d050bc01-8662-4b0a-b2ab-f59eee3794c8","Type":"ContainerStarted","Data":"9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227"} Dec 05 13:22:41 crc kubenswrapper[4711]: I1205 13:22:41.728974 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xtm4r" podStartSLOduration=5.955665979 podStartE2EDuration="13.728954872s" podCreationTimestamp="2025-12-05 13:22:28 +0000 UTC" firstStartedPulling="2025-12-05 13:22:32.602008718 +0000 UTC m=+4398.186331048" lastFinishedPulling="2025-12-05 13:22:40.375297611 +0000 UTC m=+4405.959619941" observedRunningTime="2025-12-05 13:22:41.71547731 +0000 UTC m=+4407.299799660" watchObservedRunningTime="2025-12-05 13:22:41.728954872 +0000 UTC m=+4407.313277202" Dec 05 13:22:49 crc kubenswrapper[4711]: I1205 13:22:49.158558 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:49 crc kubenswrapper[4711]: I1205 13:22:49.159199 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:49 crc kubenswrapper[4711]: I1205 13:22:49.219175 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:49 crc kubenswrapper[4711]: I1205 13:22:49.847577 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:49 crc kubenswrapper[4711]: I1205 13:22:49.915590 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xtm4r"] Dec 05 13:22:51 crc kubenswrapper[4711]: I1205 13:22:51.791045 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xtm4r" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerName="registry-server" containerID="cri-o://9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227" gracePeriod=2 Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.384558 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.478908 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-catalog-content\") pod \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.479423 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-utilities\") pod \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.479669 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76t7p\" (UniqueName: \"kubernetes.io/projected/d050bc01-8662-4b0a-b2ab-f59eee3794c8-kube-api-access-76t7p\") pod \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\" (UID: \"d050bc01-8662-4b0a-b2ab-f59eee3794c8\") " Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.481121 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-utilities" (OuterVolumeSpecName: "utilities") pod "d050bc01-8662-4b0a-b2ab-f59eee3794c8" (UID: "d050bc01-8662-4b0a-b2ab-f59eee3794c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.502686 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d050bc01-8662-4b0a-b2ab-f59eee3794c8-kube-api-access-76t7p" (OuterVolumeSpecName: "kube-api-access-76t7p") pod "d050bc01-8662-4b0a-b2ab-f59eee3794c8" (UID: "d050bc01-8662-4b0a-b2ab-f59eee3794c8"). InnerVolumeSpecName "kube-api-access-76t7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.537257 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d050bc01-8662-4b0a-b2ab-f59eee3794c8" (UID: "d050bc01-8662-4b0a-b2ab-f59eee3794c8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.581579 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.581615 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d050bc01-8662-4b0a-b2ab-f59eee3794c8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.581629 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76t7p\" (UniqueName: \"kubernetes.io/projected/d050bc01-8662-4b0a-b2ab-f59eee3794c8-kube-api-access-76t7p\") on node \"crc\" DevicePath \"\"" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.809950 4711 generic.go:334] "Generic (PLEG): container finished" podID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerID="9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227" exitCode=0 Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.810007 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xtm4r" event={"ID":"d050bc01-8662-4b0a-b2ab-f59eee3794c8","Type":"ContainerDied","Data":"9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227"} Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.810052 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xtm4r" event={"ID":"d050bc01-8662-4b0a-b2ab-f59eee3794c8","Type":"ContainerDied","Data":"b3060dee41bd35f7cdd07630c4018070d7f4eeaf5850e66ca1bca15be1e0d676"} Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.810079 4711 scope.go:117] "RemoveContainer" containerID="9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.810084 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xtm4r" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.841461 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xtm4r"] Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.850494 4711 scope.go:117] "RemoveContainer" containerID="94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.852550 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xtm4r"] Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.872797 4711 scope.go:117] "RemoveContainer" containerID="eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.919305 4711 scope.go:117] "RemoveContainer" containerID="9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227" Dec 05 13:22:52 crc kubenswrapper[4711]: E1205 13:22:52.919868 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227\": container with ID starting with 9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227 not found: ID does not exist" containerID="9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.919926 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227"} err="failed to get container status \"9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227\": rpc error: code = NotFound desc = could not find container \"9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227\": container with ID starting with 9fb61ee999c199d94834904e96a0eaca8e1ef194225bb727679a40ca4684b227 not found: ID does not exist" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.919987 4711 scope.go:117] "RemoveContainer" containerID="94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495" Dec 05 13:22:52 crc kubenswrapper[4711]: E1205 13:22:52.920455 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495\": container with ID starting with 94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495 not found: ID does not exist" containerID="94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.920504 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495"} err="failed to get container status \"94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495\": rpc error: code = NotFound desc = could not find container \"94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495\": container with ID starting with 94b07efbbdfb840437fbf293d9f567a8377678f533303ee15e25954907280495 not found: ID does not exist" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.920523 4711 scope.go:117] "RemoveContainer" containerID="eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce" Dec 05 13:22:52 crc kubenswrapper[4711]: E1205 13:22:52.920828 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce\": container with ID starting with eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce not found: ID does not exist" containerID="eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce" Dec 05 13:22:52 crc kubenswrapper[4711]: I1205 13:22:52.920856 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce"} err="failed to get container status \"eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce\": rpc error: code = NotFound desc = could not find container \"eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce\": container with ID starting with eed3f37df5cf6ae00ddfdc8f127bae9afb5b688a343546fe7425b410c4bec0ce not found: ID does not exist" Dec 05 13:22:54 crc kubenswrapper[4711]: I1205 13:22:54.683630 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:22:54 crc kubenswrapper[4711]: E1205 13:22:54.684233 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:22:54 crc kubenswrapper[4711]: I1205 13:22:54.699923 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" path="/var/lib/kubelet/pods/d050bc01-8662-4b0a-b2ab-f59eee3794c8/volumes" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.266593 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2qrlt"] Dec 05 13:22:57 crc kubenswrapper[4711]: E1205 13:22:57.267402 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerName="extract-utilities" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.267432 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerName="extract-utilities" Dec 05 13:22:57 crc kubenswrapper[4711]: E1205 13:22:57.267447 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerName="extract-content" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.267453 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerName="extract-content" Dec 05 13:22:57 crc kubenswrapper[4711]: E1205 13:22:57.267470 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerName="registry-server" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.267476 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerName="registry-server" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.267692 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d050bc01-8662-4b0a-b2ab-f59eee3794c8" containerName="registry-server" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.269087 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.291527 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2qrlt"] Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.390255 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvfvk\" (UniqueName: \"kubernetes.io/projected/9f91d561-82dd-4f73-8151-2738705c812d-kube-api-access-wvfvk\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.390650 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-utilities\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.390741 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-catalog-content\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.492996 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-utilities\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.493067 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-catalog-content\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.493326 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvfvk\" (UniqueName: \"kubernetes.io/projected/9f91d561-82dd-4f73-8151-2738705c812d-kube-api-access-wvfvk\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.493891 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-catalog-content\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.494128 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-utilities\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.518288 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wvfvk\" (UniqueName: \"kubernetes.io/projected/9f91d561-82dd-4f73-8151-2738705c812d-kube-api-access-wvfvk\") pod \"certified-operators-2qrlt\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:57 crc kubenswrapper[4711]: I1205 13:22:57.599139 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:22:58 crc kubenswrapper[4711]: I1205 13:22:58.004428 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2qrlt"] Dec 05 13:22:58 crc kubenswrapper[4711]: W1205 13:22:58.008540 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f91d561_82dd_4f73_8151_2738705c812d.slice/crio-d7a4e8852cb5a9692a1eb5d5aa9f4b06651eeb7ff1dac3b403cda32271077526 WatchSource:0}: Error finding container d7a4e8852cb5a9692a1eb5d5aa9f4b06651eeb7ff1dac3b403cda32271077526: Status 404 returned error can't find the container with id d7a4e8852cb5a9692a1eb5d5aa9f4b06651eeb7ff1dac3b403cda32271077526 Dec 05 13:22:58 crc kubenswrapper[4711]: I1205 13:22:58.883771 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2qrlt" event={"ID":"9f91d561-82dd-4f73-8151-2738705c812d","Type":"ContainerStarted","Data":"d7a4e8852cb5a9692a1eb5d5aa9f4b06651eeb7ff1dac3b403cda32271077526"} Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.674077 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-skmtp"] Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.677097 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.709629 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-skmtp"] Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.861591 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9pnf\" (UniqueName: \"kubernetes.io/projected/c32caba0-55f1-44be-81c7-ddbb46915e7e-kube-api-access-z9pnf\") pod \"redhat-marketplace-skmtp\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.862315 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-catalog-content\") pod \"redhat-marketplace-skmtp\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.862523 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-utilities\") pod \"redhat-marketplace-skmtp\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.922530 4711 generic.go:334] "Generic (PLEG): container finished" podID="9f91d561-82dd-4f73-8151-2738705c812d" containerID="84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837" exitCode=0 Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.922874 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2qrlt" event={"ID":"9f91d561-82dd-4f73-8151-2738705c812d","Type":"ContainerDied","Data":"84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837"} Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.963846 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-utilities\") pod \"redhat-marketplace-skmtp\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.963941 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9pnf\" (UniqueName: \"kubernetes.io/projected/c32caba0-55f1-44be-81c7-ddbb46915e7e-kube-api-access-z9pnf\") pod \"redhat-marketplace-skmtp\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.964033 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-catalog-content\") pod \"redhat-marketplace-skmtp\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.964664 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-utilities\") pod \"redhat-marketplace-skmtp\" (UID: 
\"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.964677 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-catalog-content\") pod \"redhat-marketplace-skmtp\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.984333 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9pnf\" (UniqueName: \"kubernetes.io/projected/c32caba0-55f1-44be-81c7-ddbb46915e7e-kube-api-access-z9pnf\") pod \"redhat-marketplace-skmtp\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:00 crc kubenswrapper[4711]: I1205 13:23:00.993197 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:01 crc kubenswrapper[4711]: I1205 13:23:01.468891 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-skmtp"] Dec 05 13:23:01 crc kubenswrapper[4711]: I1205 13:23:01.949085 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skmtp" event={"ID":"c32caba0-55f1-44be-81c7-ddbb46915e7e","Type":"ContainerStarted","Data":"60d5e64baab7a42963fa0c613f7b029dee1df358fe77bc7d6866e73c678ecd03"} Dec 05 13:23:03 crc kubenswrapper[4711]: I1205 13:23:03.971272 4711 generic.go:334] "Generic (PLEG): container finished" podID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerID="3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d" exitCode=0 Dec 05 13:23:03 crc kubenswrapper[4711]: I1205 13:23:03.971780 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skmtp" event={"ID":"c32caba0-55f1-44be-81c7-ddbb46915e7e","Type":"ContainerDied","Data":"3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d"} Dec 05 13:23:03 crc kubenswrapper[4711]: I1205 13:23:03.974386 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:23:06 crc kubenswrapper[4711]: I1205 13:23:06.999097 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2qrlt" event={"ID":"9f91d561-82dd-4f73-8151-2738705c812d","Type":"ContainerStarted","Data":"8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d"} Dec 05 13:23:07 crc kubenswrapper[4711]: I1205 13:23:07.001804 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skmtp" event={"ID":"c32caba0-55f1-44be-81c7-ddbb46915e7e","Type":"ContainerStarted","Data":"55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4"} Dec 05 13:23:09 crc kubenswrapper[4711]: I1205 13:23:09.683869 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:23:09 crc kubenswrapper[4711]: E1205 13:23:09.684875 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:23:11 crc kubenswrapper[4711]: I1205 13:23:11.039630 4711 generic.go:334] "Generic (PLEG): container finished" podID="9f91d561-82dd-4f73-8151-2738705c812d" containerID="8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d" exitCode=0 Dec 05 13:23:11 crc kubenswrapper[4711]: I1205 13:23:11.039681 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2qrlt" event={"ID":"9f91d561-82dd-4f73-8151-2738705c812d","Type":"ContainerDied","Data":"8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d"} Dec 05 13:23:14 crc kubenswrapper[4711]: I1205 13:23:14.083735 4711 generic.go:334] "Generic (PLEG): container finished" podID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerID="55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4" exitCode=0 Dec 05 13:23:14 crc kubenswrapper[4711]: I1205 13:23:14.083828 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skmtp" event={"ID":"c32caba0-55f1-44be-81c7-ddbb46915e7e","Type":"ContainerDied","Data":"55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4"} Dec 05 13:23:18 crc kubenswrapper[4711]: I1205 13:23:18.123784 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2qrlt" event={"ID":"9f91d561-82dd-4f73-8151-2738705c812d","Type":"ContainerStarted","Data":"45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff"} Dec 05 13:23:18 crc kubenswrapper[4711]: I1205 13:23:18.142434 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2qrlt" podStartSLOduration=4.641571605 podStartE2EDuration="21.142414376s" podCreationTimestamp="2025-12-05 13:22:57 +0000 UTC" firstStartedPulling="2025-12-05 13:23:00.924884002 +0000 UTC m=+4426.509206332" lastFinishedPulling="2025-12-05 13:23:17.425726773 +0000 UTC m=+4443.010049103" observedRunningTime="2025-12-05 13:23:18.142022857 +0000 UTC m=+4443.726345207" watchObservedRunningTime="2025-12-05 13:23:18.142414376 +0000 UTC m=+4443.726736716" Dec 05 13:23:19 crc kubenswrapper[4711]: I1205 13:23:19.154761 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skmtp" event={"ID":"c32caba0-55f1-44be-81c7-ddbb46915e7e","Type":"ContainerStarted","Data":"10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57"} Dec 05 13:23:19 crc kubenswrapper[4711]: I1205 13:23:19.191158 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-skmtp" podStartSLOduration=5.242964564 podStartE2EDuration="19.191140001s" podCreationTimestamp="2025-12-05 13:23:00 +0000 UTC" firstStartedPulling="2025-12-05 13:23:03.974122576 +0000 UTC m=+4429.558444906" lastFinishedPulling="2025-12-05 13:23:17.922298013 +0000 UTC m=+4443.506620343" observedRunningTime="2025-12-05 13:23:19.177212839 +0000 UTC m=+4444.761535179" watchObservedRunningTime="2025-12-05 13:23:19.191140001 +0000 UTC m=+4444.775462331" Dec 05 13:23:20 crc kubenswrapper[4711]: I1205 13:23:20.994045 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:20 crc kubenswrapper[4711]: I1205 13:23:20.994367 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:21 crc kubenswrapper[4711]: I1205 13:23:21.051349 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:24 crc kubenswrapper[4711]: I1205 13:23:24.683697 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:23:24 crc kubenswrapper[4711]: E1205 13:23:24.684354 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:23:27 crc kubenswrapper[4711]: I1205 13:23:27.599516 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:23:27 crc kubenswrapper[4711]: I1205 13:23:27.599830 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:23:27 crc kubenswrapper[4711]: I1205 13:23:27.652201 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:23:28 crc kubenswrapper[4711]: I1205 13:23:28.281993 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:23:28 crc kubenswrapper[4711]: I1205 13:23:28.471224 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2qrlt"] Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.252961 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2qrlt" podUID="9f91d561-82dd-4f73-8151-2738705c812d" containerName="registry-server" containerID="cri-o://45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff" gracePeriod=2 Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.748276 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.816864 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-utilities\") pod \"9f91d561-82dd-4f73-8151-2738705c812d\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.817212 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvfvk\" (UniqueName: \"kubernetes.io/projected/9f91d561-82dd-4f73-8151-2738705c812d-kube-api-access-wvfvk\") pod \"9f91d561-82dd-4f73-8151-2738705c812d\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.817274 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-catalog-content\") pod \"9f91d561-82dd-4f73-8151-2738705c812d\" (UID: \"9f91d561-82dd-4f73-8151-2738705c812d\") " Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.818658 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-utilities" (OuterVolumeSpecName: "utilities") pod "9f91d561-82dd-4f73-8151-2738705c812d" (UID: "9f91d561-82dd-4f73-8151-2738705c812d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.836092 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f91d561-82dd-4f73-8151-2738705c812d-kube-api-access-wvfvk" (OuterVolumeSpecName: "kube-api-access-wvfvk") pod "9f91d561-82dd-4f73-8151-2738705c812d" (UID: "9f91d561-82dd-4f73-8151-2738705c812d"). InnerVolumeSpecName "kube-api-access-wvfvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.869082 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f91d561-82dd-4f73-8151-2738705c812d" (UID: "9f91d561-82dd-4f73-8151-2738705c812d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.919773 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvfvk\" (UniqueName: \"kubernetes.io/projected/9f91d561-82dd-4f73-8151-2738705c812d-kube-api-access-wvfvk\") on node \"crc\" DevicePath \"\"" Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.919865 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:23:30 crc kubenswrapper[4711]: I1205 13:23:30.919879 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f91d561-82dd-4f73-8151-2738705c812d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.045742 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.267214 4711 generic.go:334] "Generic (PLEG): container finished" podID="9f91d561-82dd-4f73-8151-2738705c812d" containerID="45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff" exitCode=0 Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.267270 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2qrlt" event={"ID":"9f91d561-82dd-4f73-8151-2738705c812d","Type":"ContainerDied","Data":"45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff"} Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.267302 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2qrlt" event={"ID":"9f91d561-82dd-4f73-8151-2738705c812d","Type":"ContainerDied","Data":"d7a4e8852cb5a9692a1eb5d5aa9f4b06651eeb7ff1dac3b403cda32271077526"} Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.267326 4711 scope.go:117] "RemoveContainer" containerID="45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.267525 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2qrlt" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.309050 4711 scope.go:117] "RemoveContainer" containerID="8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.336161 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2qrlt"] Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.338035 4711 scope.go:117] "RemoveContainer" containerID="84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.349256 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2qrlt"] Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.397171 4711 scope.go:117] "RemoveContainer" containerID="45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff" Dec 05 13:23:31 crc kubenswrapper[4711]: E1205 13:23:31.397842 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff\": container with ID starting with 45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff not found: ID does not exist" containerID="45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.397883 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff"} err="failed to get container status \"45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff\": rpc error: code = NotFound desc = could not find container \"45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff\": container with ID starting with 45a2a74ef69049c09e81667cc4d1f080c90c4529d5746c434459aa2eeff1ffff not found: ID does not exist" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.397909 4711 scope.go:117] "RemoveContainer" containerID="8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d" Dec 05 13:23:31 crc kubenswrapper[4711]: E1205 13:23:31.398539 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d\": container with ID starting with 8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d not found: ID does not exist" containerID="8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.398569 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d"} err="failed to get container status \"8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d\": rpc error: code = NotFound desc = could not find container \"8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d\": container with ID starting with 8a03d14931528c5ce21d463493835a47c7d3d87d2f113b2c823c43cf7ae4e53d not found: ID does not exist" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.398596 4711 scope.go:117] "RemoveContainer" containerID="84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837" Dec 05 13:23:31 crc kubenswrapper[4711]: E1205 13:23:31.399146 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837\": container with ID starting with 84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837 not found: ID does not exist" containerID="84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837" Dec 05 13:23:31 crc kubenswrapper[4711]: I1205 13:23:31.399235 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837"} err="failed to get container status \"84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837\": rpc error: code = NotFound desc = could not find container \"84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837\": container with ID starting with 84027bcfa09239e6260bb2828ef0785f8866bb9ea123e278e3cce9bca862c837 not found: ID does not exist" Dec 05 13:23:32 crc kubenswrapper[4711]: I1205 13:23:32.717860 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f91d561-82dd-4f73-8151-2738705c812d" path="/var/lib/kubelet/pods/9f91d561-82dd-4f73-8151-2738705c812d/volumes" Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.471753 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-skmtp"] Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.472425 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-skmtp" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerName="registry-server" containerID="cri-o://10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57" gracePeriod=2 Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.970206 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.986204 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-utilities\") pod \"c32caba0-55f1-44be-81c7-ddbb46915e7e\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.986588 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-catalog-content\") pod \"c32caba0-55f1-44be-81c7-ddbb46915e7e\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.986775 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9pnf\" (UniqueName: \"kubernetes.io/projected/c32caba0-55f1-44be-81c7-ddbb46915e7e-kube-api-access-z9pnf\") pod \"c32caba0-55f1-44be-81c7-ddbb46915e7e\" (UID: \"c32caba0-55f1-44be-81c7-ddbb46915e7e\") " Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.986959 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-utilities" (OuterVolumeSpecName: "utilities") pod "c32caba0-55f1-44be-81c7-ddbb46915e7e" (UID: "c32caba0-55f1-44be-81c7-ddbb46915e7e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.987368 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:23:33 crc kubenswrapper[4711]: I1205 13:23:33.993692 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c32caba0-55f1-44be-81c7-ddbb46915e7e-kube-api-access-z9pnf" (OuterVolumeSpecName: "kube-api-access-z9pnf") pod "c32caba0-55f1-44be-81c7-ddbb46915e7e" (UID: "c32caba0-55f1-44be-81c7-ddbb46915e7e"). InnerVolumeSpecName "kube-api-access-z9pnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.010685 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c32caba0-55f1-44be-81c7-ddbb46915e7e" (UID: "c32caba0-55f1-44be-81c7-ddbb46915e7e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.088697 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9pnf\" (UniqueName: \"kubernetes.io/projected/c32caba0-55f1-44be-81c7-ddbb46915e7e-kube-api-access-z9pnf\") on node \"crc\" DevicePath \"\"" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.088743 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32caba0-55f1-44be-81c7-ddbb46915e7e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.301909 4711 generic.go:334] "Generic (PLEG): container finished" podID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerID="10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57" exitCode=0 Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.301981 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skmtp" event={"ID":"c32caba0-55f1-44be-81c7-ddbb46915e7e","Type":"ContainerDied","Data":"10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57"} Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.302018 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skmtp" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.302041 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skmtp" event={"ID":"c32caba0-55f1-44be-81c7-ddbb46915e7e","Type":"ContainerDied","Data":"60d5e64baab7a42963fa0c613f7b029dee1df358fe77bc7d6866e73c678ecd03"} Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.302065 4711 scope.go:117] "RemoveContainer" containerID="10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.325851 4711 scope.go:117] "RemoveContainer" containerID="55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.351636 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-skmtp"] Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.362936 4711 scope.go:117] "RemoveContainer" containerID="3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.362957 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-skmtp"] Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.405800 4711 scope.go:117] "RemoveContainer" containerID="10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57" Dec 05 13:23:34 crc kubenswrapper[4711]: E1205 13:23:34.406352 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57\": container with ID starting with 10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57 not found: ID does not exist" containerID="10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.406691 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57"} err="failed to get container status \"10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57\": rpc error: code = NotFound desc = could not find container \"10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57\": container with ID starting with 10717ff32e71ba7b23fe17f49bdc2f119c34bd16292db2c21f8431b1bb5f2d57 not found: ID does not exist" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.406815 4711 scope.go:117] "RemoveContainer" containerID="55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4" Dec 05 13:23:34 crc kubenswrapper[4711]: E1205 13:23:34.407845 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4\": container with ID starting with 55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4 not found: ID does not exist" containerID="55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.407876 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4"} err="failed to get container status \"55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4\": rpc error: code = NotFound desc = could not find 
container \"55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4\": container with ID starting with 55e8aab5abdc20711e5388bda17e7fef838d162d32cb96f8fcc011ba6a4de4d4 not found: ID does not exist" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.407897 4711 scope.go:117] "RemoveContainer" containerID="3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d" Dec 05 13:23:34 crc kubenswrapper[4711]: E1205 13:23:34.408196 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d\": container with ID starting with 3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d not found: ID does not exist" containerID="3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.408272 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d"} err="failed to get container status \"3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d\": rpc error: code = NotFound desc = could not find container \"3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d\": container with ID starting with 3c3b1847f61f7576aaf559a4db3336541ac610213e5f077367cb86b51e30033d not found: ID does not exist" Dec 05 13:23:34 crc kubenswrapper[4711]: I1205 13:23:34.696770 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" path="/var/lib/kubelet/pods/c32caba0-55f1-44be-81c7-ddbb46915e7e/volumes" Dec 05 13:23:37 crc kubenswrapper[4711]: I1205 13:23:37.683872 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:23:37 crc kubenswrapper[4711]: E1205 13:23:37.684526 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:23:52 crc kubenswrapper[4711]: I1205 13:23:52.683061 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:23:52 crc kubenswrapper[4711]: E1205 13:23:52.683901 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:24:03 crc kubenswrapper[4711]: I1205 13:24:03.684061 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:24:03 crc kubenswrapper[4711]: E1205 13:24:03.685270 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:24:18 crc kubenswrapper[4711]: I1205 13:24:18.691841 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:24:18 crc kubenswrapper[4711]: E1205 13:24:18.692698 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:24:30 crc kubenswrapper[4711]: I1205 13:24:30.683032 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:24:30 crc kubenswrapper[4711]: E1205 13:24:30.683915 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:24:45 crc kubenswrapper[4711]: I1205 13:24:45.683925 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:24:45 crc kubenswrapper[4711]: E1205 13:24:45.684827 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:24:58 crc kubenswrapper[4711]: I1205 13:24:58.711964 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:24:58 crc kubenswrapper[4711]: E1205 13:24:58.715483 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:25:11 crc kubenswrapper[4711]: I1205 13:25:11.683582 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:25:11 crc kubenswrapper[4711]: E1205 13:25:11.684497 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" 
podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:25:24 crc kubenswrapper[4711]: I1205 13:25:24.683676 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:25:24 crc kubenswrapper[4711]: E1205 13:25:24.684628 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:25:37 crc kubenswrapper[4711]: I1205 13:25:37.683129 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:25:37 crc kubenswrapper[4711]: E1205 13:25:37.684361 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:25:50 crc kubenswrapper[4711]: I1205 13:25:50.683363 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:25:50 crc kubenswrapper[4711]: E1205 13:25:50.684359 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:26:05 crc kubenswrapper[4711]: I1205 13:26:05.683490 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:26:05 crc kubenswrapper[4711]: E1205 13:26:05.684473 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:26:17 crc kubenswrapper[4711]: I1205 13:26:17.683212 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:26:17 crc kubenswrapper[4711]: E1205 13:26:17.684262 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:26:29 crc kubenswrapper[4711]: I1205 13:26:29.683699 4711 scope.go:117] "RemoveContainer" 
containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:26:31 crc kubenswrapper[4711]: I1205 13:26:31.042376 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"f89411112b63eb9d32d2af3c5293c78ad8bcb3c5e25f99fc99559b0cf0656c1b"} Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.278558 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c9mfb"] Dec 05 13:28:16 crc kubenswrapper[4711]: E1205 13:28:16.280578 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f91d561-82dd-4f73-8151-2738705c812d" containerName="registry-server" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.280883 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f91d561-82dd-4f73-8151-2738705c812d" containerName="registry-server" Dec 05 13:28:16 crc kubenswrapper[4711]: E1205 13:28:16.280967 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerName="extract-content" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.281038 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerName="extract-content" Dec 05 13:28:16 crc kubenswrapper[4711]: E1205 13:28:16.281146 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f91d561-82dd-4f73-8151-2738705c812d" containerName="extract-utilities" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.281317 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f91d561-82dd-4f73-8151-2738705c812d" containerName="extract-utilities" Dec 05 13:28:16 crc kubenswrapper[4711]: E1205 13:28:16.281423 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerName="registry-server" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.281502 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerName="registry-server" Dec 05 13:28:16 crc kubenswrapper[4711]: E1205 13:28:16.281596 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f91d561-82dd-4f73-8151-2738705c812d" containerName="extract-content" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.281664 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f91d561-82dd-4f73-8151-2738705c812d" containerName="extract-content" Dec 05 13:28:16 crc kubenswrapper[4711]: E1205 13:28:16.281734 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerName="extract-utilities" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.281809 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerName="extract-utilities" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.282144 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f91d561-82dd-4f73-8151-2738705c812d" containerName="registry-server" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.282252 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32caba0-55f1-44be-81c7-ddbb46915e7e" containerName="registry-server" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.283961 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.288189 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c9mfb"] Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.312531 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7hh4\" (UniqueName: \"kubernetes.io/projected/338a96f6-2bc3-4ebe-9023-41944c8f562c-kube-api-access-h7hh4\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.312585 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-utilities\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.312661 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-catalog-content\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.414238 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-catalog-content\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.414507 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7hh4\" (UniqueName: \"kubernetes.io/projected/338a96f6-2bc3-4ebe-9023-41944c8f562c-kube-api-access-h7hh4\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.414546 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-utilities\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.414801 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-catalog-content\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.415034 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-utilities\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.441760 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-h7hh4\" (UniqueName: \"kubernetes.io/projected/338a96f6-2bc3-4ebe-9023-41944c8f562c-kube-api-access-h7hh4\") pod \"redhat-operators-c9mfb\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:16 crc kubenswrapper[4711]: I1205 13:28:16.612620 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:17 crc kubenswrapper[4711]: I1205 13:28:17.147889 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c9mfb"] Dec 05 13:28:17 crc kubenswrapper[4711]: I1205 13:28:17.568155 4711 generic.go:334] "Generic (PLEG): container finished" podID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerID="17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a" exitCode=0 Dec 05 13:28:17 crc kubenswrapper[4711]: I1205 13:28:17.568496 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9mfb" event={"ID":"338a96f6-2bc3-4ebe-9023-41944c8f562c","Type":"ContainerDied","Data":"17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a"} Dec 05 13:28:17 crc kubenswrapper[4711]: I1205 13:28:17.568522 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9mfb" event={"ID":"338a96f6-2bc3-4ebe-9023-41944c8f562c","Type":"ContainerStarted","Data":"a8120a2ee96927e416274cb83b955f519e8a3e554bdb97d442a28f8f5190a626"} Dec 05 13:28:17 crc kubenswrapper[4711]: I1205 13:28:17.570101 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:28:19 crc kubenswrapper[4711]: I1205 13:28:19.588547 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9mfb" event={"ID":"338a96f6-2bc3-4ebe-9023-41944c8f562c","Type":"ContainerStarted","Data":"943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686"} Dec 05 13:28:21 crc kubenswrapper[4711]: I1205 13:28:21.607207 4711 generic.go:334] "Generic (PLEG): container finished" podID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerID="943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686" exitCode=0 Dec 05 13:28:21 crc kubenswrapper[4711]: I1205 13:28:21.607280 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9mfb" event={"ID":"338a96f6-2bc3-4ebe-9023-41944c8f562c","Type":"ContainerDied","Data":"943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686"} Dec 05 13:28:22 crc kubenswrapper[4711]: I1205 13:28:22.628691 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9mfb" event={"ID":"338a96f6-2bc3-4ebe-9023-41944c8f562c","Type":"ContainerStarted","Data":"12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684"} Dec 05 13:28:22 crc kubenswrapper[4711]: I1205 13:28:22.664730 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c9mfb" podStartSLOduration=2.251429869 podStartE2EDuration="6.664697731s" podCreationTimestamp="2025-12-05 13:28:16 +0000 UTC" firstStartedPulling="2025-12-05 13:28:17.569792277 +0000 UTC m=+4743.154114607" lastFinishedPulling="2025-12-05 13:28:21.983060149 +0000 UTC m=+4747.567382469" observedRunningTime="2025-12-05 13:28:22.647366895 +0000 UTC m=+4748.231689235" watchObservedRunningTime="2025-12-05 13:28:22.664697731 +0000 UTC m=+4748.249020071" Dec 05 13:28:26 crc 
kubenswrapper[4711]: I1205 13:28:26.613408 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:26 crc kubenswrapper[4711]: I1205 13:28:26.614024 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:27 crc kubenswrapper[4711]: I1205 13:28:27.665067 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c9mfb" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="registry-server" probeResult="failure" output=< Dec 05 13:28:27 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 13:28:27 crc kubenswrapper[4711]: > Dec 05 13:28:36 crc kubenswrapper[4711]: I1205 13:28:36.663655 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:36 crc kubenswrapper[4711]: I1205 13:28:36.711874 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:36 crc kubenswrapper[4711]: I1205 13:28:36.914626 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c9mfb"] Dec 05 13:28:37 crc kubenswrapper[4711]: I1205 13:28:37.763550 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c9mfb" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="registry-server" containerID="cri-o://12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684" gracePeriod=2 Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.245481 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.403559 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-catalog-content\") pod \"338a96f6-2bc3-4ebe-9023-41944c8f562c\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.403708 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7hh4\" (UniqueName: \"kubernetes.io/projected/338a96f6-2bc3-4ebe-9023-41944c8f562c-kube-api-access-h7hh4\") pod \"338a96f6-2bc3-4ebe-9023-41944c8f562c\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.404880 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-utilities\") pod \"338a96f6-2bc3-4ebe-9023-41944c8f562c\" (UID: \"338a96f6-2bc3-4ebe-9023-41944c8f562c\") " Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.406026 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-utilities" (OuterVolumeSpecName: "utilities") pod "338a96f6-2bc3-4ebe-9023-41944c8f562c" (UID: "338a96f6-2bc3-4ebe-9023-41944c8f562c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.409897 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/338a96f6-2bc3-4ebe-9023-41944c8f562c-kube-api-access-h7hh4" (OuterVolumeSpecName: "kube-api-access-h7hh4") pod "338a96f6-2bc3-4ebe-9023-41944c8f562c" (UID: "338a96f6-2bc3-4ebe-9023-41944c8f562c"). InnerVolumeSpecName "kube-api-access-h7hh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.507978 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7hh4\" (UniqueName: \"kubernetes.io/projected/338a96f6-2bc3-4ebe-9023-41944c8f562c-kube-api-access-h7hh4\") on node \"crc\" DevicePath \"\"" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.508022 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.532213 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "338a96f6-2bc3-4ebe-9023-41944c8f562c" (UID: "338a96f6-2bc3-4ebe-9023-41944c8f562c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.610156 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/338a96f6-2bc3-4ebe-9023-41944c8f562c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.776147 4711 generic.go:334] "Generic (PLEG): container finished" podID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerID="12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684" exitCode=0 Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.776178 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9mfb" event={"ID":"338a96f6-2bc3-4ebe-9023-41944c8f562c","Type":"ContainerDied","Data":"12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684"} Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.776219 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9mfb" event={"ID":"338a96f6-2bc3-4ebe-9023-41944c8f562c","Type":"ContainerDied","Data":"a8120a2ee96927e416274cb83b955f519e8a3e554bdb97d442a28f8f5190a626"} Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.776239 4711 scope.go:117] "RemoveContainer" containerID="12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.776253 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c9mfb" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.799323 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c9mfb"] Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.807087 4711 scope.go:117] "RemoveContainer" containerID="943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.824861 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c9mfb"] Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.833580 4711 scope.go:117] "RemoveContainer" containerID="17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.877073 4711 scope.go:117] "RemoveContainer" containerID="12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684" Dec 05 13:28:38 crc kubenswrapper[4711]: E1205 13:28:38.877630 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684\": container with ID starting with 12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684 not found: ID does not exist" containerID="12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.877666 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684"} err="failed to get container status \"12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684\": rpc error: code = NotFound desc = could not find container \"12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684\": container with ID starting with 12772b9b681237dbd1491e7333a89ede4368b69b868b880b5efd06a91d906684 not found: ID does not exist" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.877689 4711 scope.go:117] "RemoveContainer" containerID="943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686" Dec 05 13:28:38 crc kubenswrapper[4711]: E1205 13:28:38.878257 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686\": container with ID starting with 943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686 not found: ID does not exist" containerID="943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.878314 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686"} err="failed to get container status \"943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686\": rpc error: code = NotFound desc = could not find container \"943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686\": container with ID starting with 943be3547f8989f0db2ed3b8da41137b95c3f473d1e5108fe4d1da0820e24686 not found: ID does not exist" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.878351 4711 scope.go:117] "RemoveContainer" containerID="17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a" Dec 05 13:28:38 crc kubenswrapper[4711]: E1205 13:28:38.878765 4711 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a\": container with ID starting with 17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a not found: ID does not exist" containerID="17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a" Dec 05 13:28:38 crc kubenswrapper[4711]: I1205 13:28:38.878792 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a"} err="failed to get container status \"17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a\": rpc error: code = NotFound desc = could not find container \"17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a\": container with ID starting with 17b062db8b9fd51faf58051486b82079a5b767706efd9154f348908e15887c6a not found: ID does not exist" Dec 05 13:28:40 crc kubenswrapper[4711]: I1205 13:28:40.698064 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" path="/var/lib/kubelet/pods/338a96f6-2bc3-4ebe-9023-41944c8f562c/volumes" Dec 05 13:28:48 crc kubenswrapper[4711]: I1205 13:28:48.301417 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:28:48 crc kubenswrapper[4711]: I1205 13:28:48.301974 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:29:18 crc kubenswrapper[4711]: I1205 13:29:18.301070 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:29:18 crc kubenswrapper[4711]: I1205 13:29:18.301766 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:29:48 crc kubenswrapper[4711]: I1205 13:29:48.300495 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:29:48 crc kubenswrapper[4711]: I1205 13:29:48.301048 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:29:48 crc kubenswrapper[4711]: I1205 13:29:48.301108 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 13:29:48 crc kubenswrapper[4711]: I1205 13:29:48.301959 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f89411112b63eb9d32d2af3c5293c78ad8bcb3c5e25f99fc99559b0cf0656c1b"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:29:48 crc kubenswrapper[4711]: I1205 13:29:48.302013 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://f89411112b63eb9d32d2af3c5293c78ad8bcb3c5e25f99fc99559b0cf0656c1b" gracePeriod=600 Dec 05 13:29:49 crc kubenswrapper[4711]: I1205 13:29:49.429520 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="f89411112b63eb9d32d2af3c5293c78ad8bcb3c5e25f99fc99559b0cf0656c1b" exitCode=0 Dec 05 13:29:49 crc kubenswrapper[4711]: I1205 13:29:49.429592 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"f89411112b63eb9d32d2af3c5293c78ad8bcb3c5e25f99fc99559b0cf0656c1b"} Dec 05 13:29:49 crc kubenswrapper[4711]: I1205 13:29:49.430086 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"} Dec 05 13:29:49 crc kubenswrapper[4711]: I1205 13:29:49.430110 4711 scope.go:117] "RemoveContainer" containerID="952e42e0e2bcec38a00a0e8270b5800caf9e426b7b7d45aeffc62dd2811724fb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.151179 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb"] Dec 05 13:30:00 crc kubenswrapper[4711]: E1205 13:30:00.152218 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="extract-utilities" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.152238 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="extract-utilities" Dec 05 13:30:00 crc kubenswrapper[4711]: E1205 13:30:00.152257 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="registry-server" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.152270 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="registry-server" Dec 05 13:30:00 crc kubenswrapper[4711]: E1205 13:30:00.152296 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="extract-content" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.152306 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="extract-content" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.152588 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="338a96f6-2bc3-4ebe-9023-41944c8f562c" containerName="registry-server" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.153547 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.161905 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.161961 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.163119 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb"] Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.249915 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f63e6568-5de7-4011-87bc-84bde7142bf3-secret-volume\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.250037 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f63e6568-5de7-4011-87bc-84bde7142bf3-config-volume\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.250100 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwp54\" (UniqueName: \"kubernetes.io/projected/f63e6568-5de7-4011-87bc-84bde7142bf3-kube-api-access-xwp54\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.352215 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f63e6568-5de7-4011-87bc-84bde7142bf3-secret-volume\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.352376 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f63e6568-5de7-4011-87bc-84bde7142bf3-config-volume\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.352487 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwp54\" (UniqueName: \"kubernetes.io/projected/f63e6568-5de7-4011-87bc-84bde7142bf3-kube-api-access-xwp54\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.353601 
4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f63e6568-5de7-4011-87bc-84bde7142bf3-config-volume\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.358097 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f63e6568-5de7-4011-87bc-84bde7142bf3-secret-volume\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.366844 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwp54\" (UniqueName: \"kubernetes.io/projected/f63e6568-5de7-4011-87bc-84bde7142bf3-kube-api-access-xwp54\") pod \"collect-profiles-29415690-5lszb\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.496843 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:00 crc kubenswrapper[4711]: I1205 13:30:00.954477 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb"] Dec 05 13:30:01 crc kubenswrapper[4711]: I1205 13:30:01.556764 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" event={"ID":"f63e6568-5de7-4011-87bc-84bde7142bf3","Type":"ContainerStarted","Data":"a8d005cf9874ecef67d6058a5ecac11ca2215491fc2b299962f5030ea5716f68"} Dec 05 13:30:01 crc kubenswrapper[4711]: I1205 13:30:01.557353 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" event={"ID":"f63e6568-5de7-4011-87bc-84bde7142bf3","Type":"ContainerStarted","Data":"4555eec1dab3ab5392b057d26b6b0e09dd2daa29047b2d2f3c790c478771d1c1"} Dec 05 13:30:01 crc kubenswrapper[4711]: I1205 13:30:01.580818 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" podStartSLOduration=1.580796051 podStartE2EDuration="1.580796051s" podCreationTimestamp="2025-12-05 13:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 13:30:01.576416884 +0000 UTC m=+4847.160739234" watchObservedRunningTime="2025-12-05 13:30:01.580796051 +0000 UTC m=+4847.165118381" Dec 05 13:30:02 crc kubenswrapper[4711]: I1205 13:30:02.569079 4711 generic.go:334] "Generic (PLEG): container finished" podID="f63e6568-5de7-4011-87bc-84bde7142bf3" containerID="a8d005cf9874ecef67d6058a5ecac11ca2215491fc2b299962f5030ea5716f68" exitCode=0 Dec 05 13:30:02 crc kubenswrapper[4711]: I1205 13:30:02.569181 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" event={"ID":"f63e6568-5de7-4011-87bc-84bde7142bf3","Type":"ContainerDied","Data":"a8d005cf9874ecef67d6058a5ecac11ca2215491fc2b299962f5030ea5716f68"} Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.014216 4711 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.032048 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwp54\" (UniqueName: \"kubernetes.io/projected/f63e6568-5de7-4011-87bc-84bde7142bf3-kube-api-access-xwp54\") pod \"f63e6568-5de7-4011-87bc-84bde7142bf3\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.032152 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f63e6568-5de7-4011-87bc-84bde7142bf3-secret-volume\") pod \"f63e6568-5de7-4011-87bc-84bde7142bf3\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.032462 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f63e6568-5de7-4011-87bc-84bde7142bf3-config-volume\") pod \"f63e6568-5de7-4011-87bc-84bde7142bf3\" (UID: \"f63e6568-5de7-4011-87bc-84bde7142bf3\") " Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.033605 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f63e6568-5de7-4011-87bc-84bde7142bf3-config-volume" (OuterVolumeSpecName: "config-volume") pod "f63e6568-5de7-4011-87bc-84bde7142bf3" (UID: "f63e6568-5de7-4011-87bc-84bde7142bf3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.041509 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f63e6568-5de7-4011-87bc-84bde7142bf3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f63e6568-5de7-4011-87bc-84bde7142bf3" (UID: "f63e6568-5de7-4011-87bc-84bde7142bf3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.041919 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f63e6568-5de7-4011-87bc-84bde7142bf3-kube-api-access-xwp54" (OuterVolumeSpecName: "kube-api-access-xwp54") pod "f63e6568-5de7-4011-87bc-84bde7142bf3" (UID: "f63e6568-5de7-4011-87bc-84bde7142bf3"). InnerVolumeSpecName "kube-api-access-xwp54". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.135588 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f63e6568-5de7-4011-87bc-84bde7142bf3-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.135932 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwp54\" (UniqueName: \"kubernetes.io/projected/f63e6568-5de7-4011-87bc-84bde7142bf3-kube-api-access-xwp54\") on node \"crc\" DevicePath \"\"" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.135946 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f63e6568-5de7-4011-87bc-84bde7142bf3-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.590964 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" event={"ID":"f63e6568-5de7-4011-87bc-84bde7142bf3","Type":"ContainerDied","Data":"4555eec1dab3ab5392b057d26b6b0e09dd2daa29047b2d2f3c790c478771d1c1"} Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.591019 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4555eec1dab3ab5392b057d26b6b0e09dd2daa29047b2d2f3c790c478771d1c1" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.591082 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-5lszb" Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.660063 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c"] Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.671593 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-5qh9c"] Dec 05 13:30:04 crc kubenswrapper[4711]: I1205 13:30:04.697761 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0a4475c-cc65-4438-98a0-fa898baa9ce0" path="/var/lib/kubelet/pods/f0a4475c-cc65-4438-98a0-fa898baa9ce0/volumes" Dec 05 13:30:31 crc kubenswrapper[4711]: I1205 13:30:31.857597 4711 scope.go:117] "RemoveContainer" containerID="ac4ef4bea8181282bb0b7a8f0cac7a0cfe7739d57bc2c2ff3efc358432121ec3" Dec 05 13:31:48 crc kubenswrapper[4711]: I1205 13:31:48.301289 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:31:48 crc kubenswrapper[4711]: I1205 13:31:48.302087 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:32:18 crc kubenswrapper[4711]: I1205 13:32:18.301020 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body=
Dec 05 13:32:18 crc kubenswrapper[4711]: I1205 13:32:18.301669 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:32:48 crc kubenswrapper[4711]: I1205 13:32:48.301176 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:32:48 crc kubenswrapper[4711]: I1205 13:32:48.301711 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:32:48 crc kubenswrapper[4711]: I1205 13:32:48.301759 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt"
Dec 05 13:32:48 crc kubenswrapper[4711]: I1205 13:32:48.302622 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 13:32:48 crc kubenswrapper[4711]: I1205 13:32:48.302688 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28" gracePeriod=600
Dec 05 13:32:48 crc kubenswrapper[4711]: E1205 13:32:48.424306 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:32:49 crc kubenswrapper[4711]: I1205 13:32:49.285741 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28" exitCode=0
Dec 05 13:32:49 crc kubenswrapper[4711]: I1205 13:32:49.285808 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"}
Dec 05 13:32:49 crc kubenswrapper[4711]: I1205 13:32:49.286098 4711 scope.go:117] "RemoveContainer" containerID="f89411112b63eb9d32d2af3c5293c78ad8bcb3c5e25f99fc99559b0cf0656c1b"
Dec 05 13:32:49 crc kubenswrapper[4711]: I1205 13:32:49.286774 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:32:49 crc kubenswrapper[4711]: E1205 13:32:49.287044 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:33:04 crc kubenswrapper[4711]: I1205 13:33:04.683279 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:33:04 crc kubenswrapper[4711]: E1205 13:33:04.684199 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:33:19 crc kubenswrapper[4711]: I1205 13:33:19.684142 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:33:19 crc kubenswrapper[4711]: E1205 13:33:19.684912 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:33:30 crc kubenswrapper[4711]: I1205 13:33:30.683473 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:33:30 crc kubenswrapper[4711]: E1205 13:33:30.684291 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.487060 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xdshv"]
Dec 05 13:33:33 crc kubenswrapper[4711]: E1205 13:33:33.488078 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63e6568-5de7-4011-87bc-84bde7142bf3" containerName="collect-profiles"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.488093 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63e6568-5de7-4011-87bc-84bde7142bf3" containerName="collect-profiles"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.488365 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f63e6568-5de7-4011-87bc-84bde7142bf3" containerName="collect-profiles"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.490057 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.501217 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xdshv"]
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.588655 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-catalog-content\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.588751 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq8bw\" (UniqueName: \"kubernetes.io/projected/be075f57-0e4d-4f40-b929-40a0c1328b92-kube-api-access-cq8bw\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.588872 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-utilities\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.690405 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-catalog-content\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.690527 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq8bw\" (UniqueName: \"kubernetes.io/projected/be075f57-0e4d-4f40-b929-40a0c1328b92-kube-api-access-cq8bw\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.690717 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-utilities\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.691404 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-catalog-content\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.691905 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-utilities\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.711203 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq8bw\" (UniqueName: \"kubernetes.io/projected/be075f57-0e4d-4f40-b929-40a0c1328b92-kube-api-access-cq8bw\") pod \"certified-operators-xdshv\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") " pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:33 crc kubenswrapper[4711]: I1205 13:33:33.828736 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:34 crc kubenswrapper[4711]: I1205 13:33:34.380214 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xdshv"]
Dec 05 13:33:34 crc kubenswrapper[4711]: I1205 13:33:34.697279 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 13:33:34 crc kubenswrapper[4711]: I1205 13:33:34.703435 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xdshv" event={"ID":"be075f57-0e4d-4f40-b929-40a0c1328b92","Type":"ContainerStarted","Data":"7d2a8d09ec2ad59a1b1fa04dadad4ac75d69b1ebf6597b41a5cad232c703a555"}
Dec 05 13:33:34 crc kubenswrapper[4711]: I1205 13:33:34.703743 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xdshv" event={"ID":"be075f57-0e4d-4f40-b929-40a0c1328b92","Type":"ContainerStarted","Data":"2a6fcd5b4ca4d2cfbdfe6cf8f69bc107dbcd2205fd39786e93deae472eeab489"}
Dec 05 13:33:35 crc kubenswrapper[4711]: I1205 13:33:35.704269 4711 generic.go:334] "Generic (PLEG): container finished" podID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerID="7d2a8d09ec2ad59a1b1fa04dadad4ac75d69b1ebf6597b41a5cad232c703a555" exitCode=0
Dec 05 13:33:35 crc kubenswrapper[4711]: I1205 13:33:35.704337 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xdshv" event={"ID":"be075f57-0e4d-4f40-b929-40a0c1328b92","Type":"ContainerDied","Data":"7d2a8d09ec2ad59a1b1fa04dadad4ac75d69b1ebf6597b41a5cad232c703a555"}
Dec 05 13:33:35 crc kubenswrapper[4711]: I1205 13:33:35.704642 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xdshv" event={"ID":"be075f57-0e4d-4f40-b929-40a0c1328b92","Type":"ContainerStarted","Data":"ffa8f5c24a07ca6909dbd1b5021e6363d6fc451524bb3987b485d445ed5e5e62"}
Dec 05 13:33:36 crc kubenswrapper[4711]: I1205 13:33:36.714206 4711 generic.go:334] "Generic (PLEG): container finished" podID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerID="ffa8f5c24a07ca6909dbd1b5021e6363d6fc451524bb3987b485d445ed5e5e62" exitCode=0
Dec 05 13:33:36 crc kubenswrapper[4711]: I1205 13:33:36.714263 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xdshv" event={"ID":"be075f57-0e4d-4f40-b929-40a0c1328b92","Type":"ContainerDied","Data":"ffa8f5c24a07ca6909dbd1b5021e6363d6fc451524bb3987b485d445ed5e5e62"}
Dec 05 13:33:37 crc kubenswrapper[4711]: I1205 13:33:37.739269 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xdshv" event={"ID":"be075f57-0e4d-4f40-b929-40a0c1328b92","Type":"ContainerStarted","Data":"a336bc79574b93ff0c89639065828800e8bdd4df3064a250ccd26537f93e99c1"}
Dec 05 13:33:37 crc kubenswrapper[4711]: I1205 13:33:37.759050 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xdshv" podStartSLOduration=2.356964527 podStartE2EDuration="4.759032504s" podCreationTimestamp="2025-12-05 13:33:33 +0000 UTC" firstStartedPulling="2025-12-05 13:33:34.697014039 +0000 UTC m=+5060.281336369" lastFinishedPulling="2025-12-05 13:33:37.099082016 +0000 UTC m=+5062.683404346" observedRunningTime="2025-12-05 13:33:37.756190905 +0000 UTC m=+5063.340513225" watchObservedRunningTime="2025-12-05 13:33:37.759032504 +0000 UTC m=+5063.343354834"
Dec 05 13:33:41 crc kubenswrapper[4711]: I1205 13:33:41.682534 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:33:41 crc kubenswrapper[4711]: E1205 13:33:41.683357 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:33:43 crc kubenswrapper[4711]: I1205 13:33:43.828901 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:43 crc kubenswrapper[4711]: I1205 13:33:43.829232 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:43 crc kubenswrapper[4711]: I1205 13:33:43.897594 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.161882 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wrtmk"]
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.164820 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.191178 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wrtmk"]
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.337544 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-utilities\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.337739 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4w9z\" (UniqueName: \"kubernetes.io/projected/e03cf48a-3417-4ac7-adb8-ae90af236bed-kube-api-access-s4w9z\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.337819 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-catalog-content\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.440102 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-utilities\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.440177 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4w9z\" (UniqueName: \"kubernetes.io/projected/e03cf48a-3417-4ac7-adb8-ae90af236bed-kube-api-access-s4w9z\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.440194 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-catalog-content\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.440833 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-catalog-content\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.441055 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-utilities\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.471863 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4w9z\" (UniqueName: \"kubernetes.io/projected/e03cf48a-3417-4ac7-adb8-ae90af236bed-kube-api-access-s4w9z\") pod \"community-operators-wrtmk\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") " pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.500104 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:44 crc kubenswrapper[4711]: I1205 13:33:44.935842 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:45 crc kubenswrapper[4711]: I1205 13:33:45.134715 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wrtmk"]
Dec 05 13:33:45 crc kubenswrapper[4711]: I1205 13:33:45.827068 4711 generic.go:334] "Generic (PLEG): container finished" podID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerID="d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944" exitCode=0
Dec 05 13:33:45 crc kubenswrapper[4711]: I1205 13:33:45.827202 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrtmk" event={"ID":"e03cf48a-3417-4ac7-adb8-ae90af236bed","Type":"ContainerDied","Data":"d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944"}
Dec 05 13:33:45 crc kubenswrapper[4711]: I1205 13:33:45.827440 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrtmk" event={"ID":"e03cf48a-3417-4ac7-adb8-ae90af236bed","Type":"ContainerStarted","Data":"8aa55480ef5c75774fdb8e64b3b2ce8bab463ce1bda9f83b464a26cc2fdb36d8"}
Dec 05 13:33:46 crc kubenswrapper[4711]: I1205 13:33:46.855733 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrtmk" event={"ID":"e03cf48a-3417-4ac7-adb8-ae90af236bed","Type":"ContainerStarted","Data":"6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122"}
Dec 05 13:33:47 crc kubenswrapper[4711]: I1205 13:33:47.343385 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xdshv"]
Dec 05 13:33:47 crc kubenswrapper[4711]: I1205 13:33:47.343672 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xdshv" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerName="registry-server" containerID="cri-o://a336bc79574b93ff0c89639065828800e8bdd4df3064a250ccd26537f93e99c1" gracePeriod=2
Dec 05 13:33:47 crc kubenswrapper[4711]: I1205 13:33:47.867007 4711 generic.go:334] "Generic (PLEG): container finished" podID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerID="6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122" exitCode=0
Dec 05 13:33:47 crc kubenswrapper[4711]: I1205 13:33:47.867066 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrtmk" event={"ID":"e03cf48a-3417-4ac7-adb8-ae90af236bed","Type":"ContainerDied","Data":"6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122"}
Dec 05 13:33:47 crc kubenswrapper[4711]: I1205 13:33:47.871490 4711 generic.go:334] "Generic (PLEG): container finished" podID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerID="a336bc79574b93ff0c89639065828800e8bdd4df3064a250ccd26537f93e99c1" exitCode=0
Dec 05 13:33:47 crc kubenswrapper[4711]: I1205 13:33:47.871543 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xdshv" event={"ID":"be075f57-0e4d-4f40-b929-40a0c1328b92","Type":"ContainerDied","Data":"a336bc79574b93ff0c89639065828800e8bdd4df3064a250ccd26537f93e99c1"}
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.457788 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.631286 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq8bw\" (UniqueName: \"kubernetes.io/projected/be075f57-0e4d-4f40-b929-40a0c1328b92-kube-api-access-cq8bw\") pod \"be075f57-0e4d-4f40-b929-40a0c1328b92\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") "
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.631421 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-catalog-content\") pod \"be075f57-0e4d-4f40-b929-40a0c1328b92\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") "
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.631460 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-utilities\") pod \"be075f57-0e4d-4f40-b929-40a0c1328b92\" (UID: \"be075f57-0e4d-4f40-b929-40a0c1328b92\") "
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.632100 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-utilities" (OuterVolumeSpecName: "utilities") pod "be075f57-0e4d-4f40-b929-40a0c1328b92" (UID: "be075f57-0e4d-4f40-b929-40a0c1328b92"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.640924 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be075f57-0e4d-4f40-b929-40a0c1328b92-kube-api-access-cq8bw" (OuterVolumeSpecName: "kube-api-access-cq8bw") pod "be075f57-0e4d-4f40-b929-40a0c1328b92" (UID: "be075f57-0e4d-4f40-b929-40a0c1328b92"). InnerVolumeSpecName "kube-api-access-cq8bw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.684452 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "be075f57-0e4d-4f40-b929-40a0c1328b92" (UID: "be075f57-0e4d-4f40-b929-40a0c1328b92"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.734569 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.734609 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be075f57-0e4d-4f40-b929-40a0c1328b92-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.734625 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq8bw\" (UniqueName: \"kubernetes.io/projected/be075f57-0e4d-4f40-b929-40a0c1328b92-kube-api-access-cq8bw\") on node \"crc\" DevicePath \"\""
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.883696 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xdshv" event={"ID":"be075f57-0e4d-4f40-b929-40a0c1328b92","Type":"ContainerDied","Data":"2a6fcd5b4ca4d2cfbdfe6cf8f69bc107dbcd2205fd39786e93deae472eeab489"}
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.883998 4711 scope.go:117] "RemoveContainer" containerID="a336bc79574b93ff0c89639065828800e8bdd4df3064a250ccd26537f93e99c1"
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.884117 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xdshv"
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.888286 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrtmk" event={"ID":"e03cf48a-3417-4ac7-adb8-ae90af236bed","Type":"ContainerStarted","Data":"9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a"}
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.923175 4711 scope.go:117] "RemoveContainer" containerID="ffa8f5c24a07ca6909dbd1b5021e6363d6fc451524bb3987b485d445ed5e5e62"
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.927082 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xdshv"]
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.942729 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xdshv"]
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.949496 4711 scope.go:117] "RemoveContainer" containerID="7d2a8d09ec2ad59a1b1fa04dadad4ac75d69b1ebf6597b41a5cad232c703a555"
Dec 05 13:33:48 crc kubenswrapper[4711]: I1205 13:33:48.951415 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wrtmk" podStartSLOduration=2.20660311 podStartE2EDuration="4.951368815s" podCreationTimestamp="2025-12-05 13:33:44 +0000 UTC" firstStartedPulling="2025-12-05 13:33:45.831770133 +0000 UTC m=+5071.416092463" lastFinishedPulling="2025-12-05 13:33:48.576535838 +0000 UTC m=+5074.160858168" observedRunningTime="2025-12-05 13:33:48.934952842 +0000 UTC m=+5074.519275172" watchObservedRunningTime="2025-12-05 13:33:48.951368815 +0000 UTC m=+5074.535691145"
Dec 05 13:33:50 crc kubenswrapper[4711]: I1205 13:33:50.698345 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" path="/var/lib/kubelet/pods/be075f57-0e4d-4f40-b929-40a0c1328b92/volumes"
Dec 05 13:33:54 crc kubenswrapper[4711]: I1205 13:33:54.500686 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:54 crc kubenswrapper[4711]: I1205 13:33:54.502134 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:54 crc kubenswrapper[4711]: I1205 13:33:54.562279 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:54 crc kubenswrapper[4711]: I1205 13:33:54.991105 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:55 crc kubenswrapper[4711]: I1205 13:33:55.063871 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wrtmk"]
Dec 05 13:33:55 crc kubenswrapper[4711]: I1205 13:33:55.684122 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:33:55 crc kubenswrapper[4711]: E1205 13:33:55.684462 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:33:57 crc kubenswrapper[4711]: I1205 13:33:57.675034 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wrtmk" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerName="registry-server" containerID="cri-o://9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a" gracePeriod=2
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.389242 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.467541 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-catalog-content\") pod \"e03cf48a-3417-4ac7-adb8-ae90af236bed\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") "
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.467756 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-utilities\") pod \"e03cf48a-3417-4ac7-adb8-ae90af236bed\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") "
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.467881 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4w9z\" (UniqueName: \"kubernetes.io/projected/e03cf48a-3417-4ac7-adb8-ae90af236bed-kube-api-access-s4w9z\") pod \"e03cf48a-3417-4ac7-adb8-ae90af236bed\" (UID: \"e03cf48a-3417-4ac7-adb8-ae90af236bed\") "
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.468851 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-utilities" (OuterVolumeSpecName: "utilities") pod "e03cf48a-3417-4ac7-adb8-ae90af236bed" (UID: "e03cf48a-3417-4ac7-adb8-ae90af236bed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.474009 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e03cf48a-3417-4ac7-adb8-ae90af236bed-kube-api-access-s4w9z" (OuterVolumeSpecName: "kube-api-access-s4w9z") pod "e03cf48a-3417-4ac7-adb8-ae90af236bed" (UID: "e03cf48a-3417-4ac7-adb8-ae90af236bed"). InnerVolumeSpecName "kube-api-access-s4w9z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.545818 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e03cf48a-3417-4ac7-adb8-ae90af236bed" (UID: "e03cf48a-3417-4ac7-adb8-ae90af236bed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.570618 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.570652 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4w9z\" (UniqueName: \"kubernetes.io/projected/e03cf48a-3417-4ac7-adb8-ae90af236bed-kube-api-access-s4w9z\") on node \"crc\" DevicePath \"\""
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.570666 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e03cf48a-3417-4ac7-adb8-ae90af236bed-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.686336 4711 generic.go:334] "Generic (PLEG): container finished" podID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerID="9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a" exitCode=0
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.691020 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wrtmk"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.698735 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrtmk" event={"ID":"e03cf48a-3417-4ac7-adb8-ae90af236bed","Type":"ContainerDied","Data":"9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a"}
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.698779 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrtmk" event={"ID":"e03cf48a-3417-4ac7-adb8-ae90af236bed","Type":"ContainerDied","Data":"8aa55480ef5c75774fdb8e64b3b2ce8bab463ce1bda9f83b464a26cc2fdb36d8"}
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.698804 4711 scope.go:117] "RemoveContainer" containerID="9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.743720 4711 scope.go:117] "RemoveContainer" containerID="6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.746832 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wrtmk"]
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.756757 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wrtmk"]
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.769902 4711 scope.go:117] "RemoveContainer" containerID="d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.828778 4711 scope.go:117] "RemoveContainer" containerID="9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a"
Dec 05 13:33:58 crc kubenswrapper[4711]: E1205 13:33:58.829600 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a\": container with ID starting with 9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a not found: ID does not exist" containerID="9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.829647 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a"} err="failed to get container status \"9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a\": rpc error: code = NotFound desc = could not find container \"9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a\": container with ID starting with 9d0b05af6c9afcb5c955ced7e786fee446391fd7643c1093ade5b185733b064a not found: ID does not exist"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.829673 4711 scope.go:117] "RemoveContainer" containerID="6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122"
Dec 05 13:33:58 crc kubenswrapper[4711]: E1205 13:33:58.830102 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122\": container with ID starting with 6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122 not found: ID does not exist" containerID="6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.830124 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122"} err="failed to get container status \"6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122\": rpc error: code = NotFound desc = could not find container \"6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122\": container with ID starting with 6744b9a60c4893bf524d971a4a724aaa71ee88fa004dcb490ff13ec9fe3ae122 not found: ID does not exist"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.830137 4711 scope.go:117] "RemoveContainer" containerID="d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944"
Dec 05 13:33:58 crc kubenswrapper[4711]: E1205 13:33:58.830430 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944\": container with ID starting with d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944 not found: ID does not exist" containerID="d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944"
Dec 05 13:33:58 crc kubenswrapper[4711]: I1205 13:33:58.830547 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944"} err="failed to get container status \"d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944\": rpc error: code = NotFound desc = could not find container \"d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944\": container with ID starting with d8fac1e583a71a5a5c32e706c0707fbda40a1db420e77376720f87d1dfc56944 not found: ID does not exist"
Dec 05 13:34:00 crc kubenswrapper[4711]: I1205 13:34:00.695557 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" path="/var/lib/kubelet/pods/e03cf48a-3417-4ac7-adb8-ae90af236bed/volumes"
Dec 05 13:34:06 crc kubenswrapper[4711]: I1205 13:34:06.684178 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:34:06 crc kubenswrapper[4711]: E1205 13:34:06.685148 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:34:20 crc kubenswrapper[4711]: I1205 13:34:20.684464 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:34:20 crc kubenswrapper[4711]: E1205 13:34:20.685217 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:34:33 crc kubenswrapper[4711]: I1205 13:34:33.683488 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:34:33 crc kubenswrapper[4711]: E1205 13:34:33.684140 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:34:45 crc kubenswrapper[4711]: I1205 13:34:45.683594 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:34:45 crc kubenswrapper[4711]: E1205 13:34:45.684485 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:34:56 crc kubenswrapper[4711]: I1205 13:34:56.683583 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:34:56 crc kubenswrapper[4711]: E1205 13:34:56.684372 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.107671 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d4728"]
Dec 05 13:34:58 crc kubenswrapper[4711]: E1205 13:34:58.108353 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerName="extract-utilities"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.108366 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerName="extract-utilities"
Dec 05 13:34:58 crc kubenswrapper[4711]: E1205 13:34:58.108404 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerName="extract-content"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.108410 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerName="extract-content"
Dec 05 13:34:58 crc kubenswrapper[4711]: E1205 13:34:58.108432 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerName="registry-server"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.108438 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerName="registry-server"
Dec 05 13:34:58 crc kubenswrapper[4711]: E1205 13:34:58.108451 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerName="registry-server"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.108457 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerName="registry-server"
Dec 05 13:34:58 crc kubenswrapper[4711]: E1205 13:34:58.108475 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerName="extract-content"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.108481 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerName="extract-content"
Dec 05 13:34:58 crc kubenswrapper[4711]: E1205 13:34:58.108487 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerName="extract-utilities"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.108493 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerName="extract-utilities"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.108706 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="be075f57-0e4d-4f40-b929-40a0c1328b92" containerName="registry-server"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.108728 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e03cf48a-3417-4ac7-adb8-ae90af236bed" containerName="registry-server"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.110448 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.124516 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d4728"]
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.219281 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxd42\" (UniqueName: \"kubernetes.io/projected/70d47a5b-3a7c-4032-99a1-542ba83971da-kube-api-access-rxd42\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.219509 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-catalog-content\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.219556 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-utilities\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.322189 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxd42\" (UniqueName: \"kubernetes.io/projected/70d47a5b-3a7c-4032-99a1-542ba83971da-kube-api-access-rxd42\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.322347 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-catalog-content\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.322375 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-utilities\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.323007 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-catalog-content\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.323046 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-utilities\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.344131 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxd42\" (UniqueName: \"kubernetes.io/projected/70d47a5b-3a7c-4032-99a1-542ba83971da-kube-api-access-rxd42\") pod \"redhat-marketplace-d4728\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") " pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.440665 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:34:58 crc kubenswrapper[4711]: I1205 13:34:58.991557 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d4728"]
Dec 05 13:34:59 crc kubenswrapper[4711]: I1205 13:34:59.266556 4711 generic.go:334] "Generic (PLEG): container finished" podID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerID="7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24" exitCode=0
Dec 05 13:34:59 crc kubenswrapper[4711]: I1205 13:34:59.266700 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d4728" event={"ID":"70d47a5b-3a7c-4032-99a1-542ba83971da","Type":"ContainerDied","Data":"7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24"}
Dec 05 13:34:59 crc kubenswrapper[4711]: I1205 13:34:59.266891 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d4728" event={"ID":"70d47a5b-3a7c-4032-99a1-542ba83971da","Type":"ContainerStarted","Data":"d9cd2809f3fbac7a3f1798a0e413713073e94af8b710bd008405c4a33dd087d6"}
Dec 05 13:35:00 crc kubenswrapper[4711]: I1205 13:35:00.278486 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d4728" event={"ID":"70d47a5b-3a7c-4032-99a1-542ba83971da","Type":"ContainerStarted","Data":"d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01"}
Dec 05 13:35:01 crc kubenswrapper[4711]: I1205 13:35:01.290523 4711 generic.go:334] "Generic (PLEG): container finished" podID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerID="d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01" exitCode=0
Dec 05 13:35:01 crc kubenswrapper[4711]: I1205 13:35:01.290625 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d4728" event={"ID":"70d47a5b-3a7c-4032-99a1-542ba83971da","Type":"ContainerDied","Data":"d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01"}
Dec 05 13:35:02 crc kubenswrapper[4711]: I1205 13:35:02.300338 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d4728" event={"ID":"70d47a5b-3a7c-4032-99a1-542ba83971da","Type":"ContainerStarted","Data":"33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c"}
Dec 05 13:35:02 crc kubenswrapper[4711]: I1205 13:35:02.325566 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d4728" podStartSLOduration=1.847174402 podStartE2EDuration="4.325539555s" podCreationTimestamp="2025-12-05 13:34:58 +0000 UTC" firstStartedPulling="2025-12-05 13:34:59.277043713 +0000 UTC m=+5144.861366043" lastFinishedPulling="2025-12-05 13:35:01.755408836 +0000 UTC m=+5147.339731196" observedRunningTime="2025-12-05 13:35:02.316210667 +0000 UTC m=+5147.900533007" watchObservedRunningTime="2025-12-05 13:35:02.325539555 +0000 UTC m=+5147.909861895"
Dec 05 13:35:08 crc kubenswrapper[4711]: I1205 13:35:08.440811 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:35:08 crc kubenswrapper[4711]: I1205 13:35:08.442409 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:35:08 crc kubenswrapper[4711]: I1205 13:35:08.503613 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:35:09 crc kubenswrapper[4711]: I1205 13:35:09.405616 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:35:09 crc kubenswrapper[4711]: I1205 13:35:09.456840 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d4728"]
Dec 05 13:35:11 crc kubenswrapper[4711]: I1205 13:35:11.383770 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d4728" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerName="registry-server" containerID="cri-o://33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c" gracePeriod=2
Dec 05 13:35:11 crc kubenswrapper[4711]: I1205 13:35:11.684242 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:35:11 crc kubenswrapper[4711]: E1205 13:35:11.684894 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:35:11 crc kubenswrapper[4711]: I1205 13:35:11.894378 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.005451 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-catalog-content\") pod \"70d47a5b-3a7c-4032-99a1-542ba83971da\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") "
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.005520 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-utilities\") pod \"70d47a5b-3a7c-4032-99a1-542ba83971da\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") "
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.005685 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxd42\" (UniqueName: \"kubernetes.io/projected/70d47a5b-3a7c-4032-99a1-542ba83971da-kube-api-access-rxd42\") pod \"70d47a5b-3a7c-4032-99a1-542ba83971da\" (UID: \"70d47a5b-3a7c-4032-99a1-542ba83971da\") "
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.006982 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-utilities" (OuterVolumeSpecName: "utilities") pod "70d47a5b-3a7c-4032-99a1-542ba83971da" (UID: "70d47a5b-3a7c-4032-99a1-542ba83971da"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.016636 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70d47a5b-3a7c-4032-99a1-542ba83971da-kube-api-access-rxd42" (OuterVolumeSpecName: "kube-api-access-rxd42") pod "70d47a5b-3a7c-4032-99a1-542ba83971da" (UID: "70d47a5b-3a7c-4032-99a1-542ba83971da"). InnerVolumeSpecName "kube-api-access-rxd42". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.023041 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70d47a5b-3a7c-4032-99a1-542ba83971da" (UID: "70d47a5b-3a7c-4032-99a1-542ba83971da"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.107878 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.107910 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70d47a5b-3a7c-4032-99a1-542ba83971da-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.107920 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxd42\" (UniqueName: \"kubernetes.io/projected/70d47a5b-3a7c-4032-99a1-542ba83971da-kube-api-access-rxd42\") on node \"crc\" DevicePath \"\""
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.396241 4711 generic.go:334] "Generic (PLEG): container finished" podID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerID="33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c" exitCode=0
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.396302 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d4728"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.396306 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d4728" event={"ID":"70d47a5b-3a7c-4032-99a1-542ba83971da","Type":"ContainerDied","Data":"33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c"}
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.396498 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d4728" event={"ID":"70d47a5b-3a7c-4032-99a1-542ba83971da","Type":"ContainerDied","Data":"d9cd2809f3fbac7a3f1798a0e413713073e94af8b710bd008405c4a33dd087d6"}
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.396552 4711 scope.go:117] "RemoveContainer" containerID="33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.418225 4711 scope.go:117] "RemoveContainer" containerID="d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.434129 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d4728"]
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.443866 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d4728"]
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.458838 4711 scope.go:117] "RemoveContainer" containerID="7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.498343 4711 scope.go:117] "RemoveContainer" containerID="33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c"
Dec 05 13:35:12 crc kubenswrapper[4711]: E1205 13:35:12.498836 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c\": container with ID starting with 33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c not found: ID does not exist" containerID="33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.498888 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c"} err="failed to get container status \"33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c\": rpc error: code = NotFound desc = could not find container \"33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c\": container with ID starting with 33511e0e2863d41aaf75dd666c0e33a699e4fe1528a30bbe9f2c1d9a928d257c not found: ID does not exist"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.498918 4711 scope.go:117] "RemoveContainer" containerID="d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01"
Dec 05 13:35:12 crc kubenswrapper[4711]: E1205 13:35:12.499423 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01\": container with ID starting with d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01 not found: ID does not exist" containerID="d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.499466 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01"} err="failed to get container status \"d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01\": rpc error: code = NotFound desc = could not find container \"d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01\": container with ID starting with d050e19f563efb95c3b07b140e8203357da4cac75ef70166c843dc556ae8fb01 not found: ID does not exist"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.499492 4711 scope.go:117] "RemoveContainer" containerID="7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24"
Dec 05 13:35:12 crc kubenswrapper[4711]: E1205 13:35:12.499830 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24\": container with ID starting with 7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24 not found: ID does not exist" containerID="7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.499866 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24"} err="failed to get container status \"7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24\": rpc error: code = NotFound desc = could not find container \"7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24\": container with ID starting with 7357657cd01895ed1c4c9e4a8bcf666ed402ad22f94f7e69c302799546b5ca24 not found: ID does not exist"
Dec 05 13:35:12 crc kubenswrapper[4711]: I1205 13:35:12.699986 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" path="/var/lib/kubelet/pods/70d47a5b-3a7c-4032-99a1-542ba83971da/volumes"
Dec 05 13:35:24 crc kubenswrapper[4711]: I1205 13:35:24.684671 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:35:24 crc kubenswrapper[4711]: E1205 13:35:24.685858 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:35:36 crc kubenswrapper[4711]: I1205 13:35:36.683557 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:35:36 crc kubenswrapper[4711]: E1205 13:35:36.684240 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:35:48 crc kubenswrapper[4711]: I1205 13:35:48.692725 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:35:48 crc kubenswrapper[4711]: E1205 13:35:48.693541 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:36:03 crc kubenswrapper[4711]: I1205 13:36:03.683308 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:36:03 crc kubenswrapper[4711]: E1205 13:36:03.684288 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:36:15 crc kubenswrapper[4711]: I1205 13:36:15.683300 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:36:15 crc kubenswrapper[4711]: E1205 13:36:15.684078 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:36:27 crc kubenswrapper[4711]: I1205 13:36:27.683443 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:36:27 crc kubenswrapper[4711]: E1205 13:36:27.684236 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:36:38 crc kubenswrapper[4711]: I1205 13:36:38.690995 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:36:38 crc kubenswrapper[4711]: E1205 13:36:38.692836 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:36:52 crc kubenswrapper[4711]: I1205 13:36:52.683610 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:36:52 crc kubenswrapper[4711]: E1205 13:36:52.684463 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:37:03 crc kubenswrapper[4711]: I1205 13:37:03.684787 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:37:03 crc kubenswrapper[4711]: E1205 13:37:03.685515 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:37:16 crc kubenswrapper[4711]: I1205 13:37:16.683479 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:37:16 crc kubenswrapper[4711]: E1205 13:37:16.684299 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:37:28 crc kubenswrapper[4711]: I1205 13:37:28.691858 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28"
Dec 05 13:37:28 crc kubenswrapper[4711]: E1205 13:37:28.692731 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt"
podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:37:41 crc kubenswrapper[4711]: I1205 13:37:41.683529 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28" Dec 05 13:37:41 crc kubenswrapper[4711]: E1205 13:37:41.684439 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:37:52 crc kubenswrapper[4711]: I1205 13:37:52.683041 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28" Dec 05 13:37:52 crc kubenswrapper[4711]: I1205 13:37:52.953898 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"7c1d5964e14b33e363076a70032ee756ffeb328e15ae767ff960de7b1596fcbb"} Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.313657 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pwl57"] Dec 05 13:38:17 crc kubenswrapper[4711]: E1205 13:38:17.317068 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerName="extract-content" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.317242 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerName="extract-content" Dec 05 13:38:17 crc kubenswrapper[4711]: E1205 13:38:17.317518 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerName="registry-server" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.317663 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerName="registry-server" Dec 05 13:38:17 crc kubenswrapper[4711]: E1205 13:38:17.317802 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerName="extract-utilities" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.317921 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerName="extract-utilities" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.318295 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="70d47a5b-3a7c-4032-99a1-542ba83971da" containerName="registry-server" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.320689 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.327052 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pwl57"] Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.348487 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-catalog-content\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.350845 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j2l5\" (UniqueName: \"kubernetes.io/projected/99fc4148-24e0-4938-8999-f8c369a289c0-kube-api-access-7j2l5\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.350929 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-utilities\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.452832 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-catalog-content\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.452983 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j2l5\" (UniqueName: \"kubernetes.io/projected/99fc4148-24e0-4938-8999-f8c369a289c0-kube-api-access-7j2l5\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.453031 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-utilities\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.453369 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-catalog-content\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.453551 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-utilities\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.484464 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7j2l5\" (UniqueName: \"kubernetes.io/projected/99fc4148-24e0-4938-8999-f8c369a289c0-kube-api-access-7j2l5\") pod \"redhat-operators-pwl57\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:17 crc kubenswrapper[4711]: I1205 13:38:17.654872 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:18 crc kubenswrapper[4711]: I1205 13:38:18.188507 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pwl57"] Dec 05 13:38:18 crc kubenswrapper[4711]: I1205 13:38:18.237205 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwl57" event={"ID":"99fc4148-24e0-4938-8999-f8c369a289c0","Type":"ContainerStarted","Data":"264acc748608b9bfbfc6e623b197c9ea4cded45ae13a720151ef5c0e765a9b37"} Dec 05 13:38:19 crc kubenswrapper[4711]: I1205 13:38:19.253711 4711 generic.go:334] "Generic (PLEG): container finished" podID="99fc4148-24e0-4938-8999-f8c369a289c0" containerID="02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59" exitCode=0 Dec 05 13:38:19 crc kubenswrapper[4711]: I1205 13:38:19.253940 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwl57" event={"ID":"99fc4148-24e0-4938-8999-f8c369a289c0","Type":"ContainerDied","Data":"02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59"} Dec 05 13:38:22 crc kubenswrapper[4711]: I1205 13:38:22.286539 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwl57" event={"ID":"99fc4148-24e0-4938-8999-f8c369a289c0","Type":"ContainerStarted","Data":"adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec"} Dec 05 13:38:25 crc kubenswrapper[4711]: I1205 13:38:25.314749 4711 generic.go:334] "Generic (PLEG): container finished" podID="99fc4148-24e0-4938-8999-f8c369a289c0" containerID="adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec" exitCode=0 Dec 05 13:38:25 crc kubenswrapper[4711]: I1205 13:38:25.314808 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwl57" event={"ID":"99fc4148-24e0-4938-8999-f8c369a289c0","Type":"ContainerDied","Data":"adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec"} Dec 05 13:38:26 crc kubenswrapper[4711]: I1205 13:38:26.327029 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwl57" event={"ID":"99fc4148-24e0-4938-8999-f8c369a289c0","Type":"ContainerStarted","Data":"4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4"} Dec 05 13:38:26 crc kubenswrapper[4711]: I1205 13:38:26.362376 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pwl57" podStartSLOduration=2.89342029 podStartE2EDuration="9.362355818s" podCreationTimestamp="2025-12-05 13:38:17 +0000 UTC" firstStartedPulling="2025-12-05 13:38:19.258636314 +0000 UTC m=+5344.842958654" lastFinishedPulling="2025-12-05 13:38:25.727571862 +0000 UTC m=+5351.311894182" observedRunningTime="2025-12-05 13:38:26.353616483 +0000 UTC m=+5351.937938833" watchObservedRunningTime="2025-12-05 13:38:26.362355818 +0000 UTC m=+5351.946678148" Dec 05 13:38:27 crc kubenswrapper[4711]: I1205 13:38:27.655976 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pwl57" Dec 
05 13:38:27 crc kubenswrapper[4711]: I1205 13:38:27.656360 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:28 crc kubenswrapper[4711]: I1205 13:38:28.718567 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pwl57" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="registry-server" probeResult="failure" output=< Dec 05 13:38:28 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 13:38:28 crc kubenswrapper[4711]: > Dec 05 13:38:37 crc kubenswrapper[4711]: I1205 13:38:37.703103 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:37 crc kubenswrapper[4711]: I1205 13:38:37.764094 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:37 crc kubenswrapper[4711]: I1205 13:38:37.942147 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pwl57"] Dec 05 13:38:39 crc kubenswrapper[4711]: I1205 13:38:39.460944 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pwl57" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="registry-server" containerID="cri-o://4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4" gracePeriod=2 Dec 05 13:38:39 crc kubenswrapper[4711]: I1205 13:38:39.966696 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.135268 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7j2l5\" (UniqueName: \"kubernetes.io/projected/99fc4148-24e0-4938-8999-f8c369a289c0-kube-api-access-7j2l5\") pod \"99fc4148-24e0-4938-8999-f8c369a289c0\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.135630 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-utilities\") pod \"99fc4148-24e0-4938-8999-f8c369a289c0\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.135690 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-catalog-content\") pod \"99fc4148-24e0-4938-8999-f8c369a289c0\" (UID: \"99fc4148-24e0-4938-8999-f8c369a289c0\") " Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.137011 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-utilities" (OuterVolumeSpecName: "utilities") pod "99fc4148-24e0-4938-8999-f8c369a289c0" (UID: "99fc4148-24e0-4938-8999-f8c369a289c0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.142179 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99fc4148-24e0-4938-8999-f8c369a289c0-kube-api-access-7j2l5" (OuterVolumeSpecName: "kube-api-access-7j2l5") pod "99fc4148-24e0-4938-8999-f8c369a289c0" (UID: "99fc4148-24e0-4938-8999-f8c369a289c0"). InnerVolumeSpecName "kube-api-access-7j2l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.238575 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.238600 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7j2l5\" (UniqueName: \"kubernetes.io/projected/99fc4148-24e0-4938-8999-f8c369a289c0-kube-api-access-7j2l5\") on node \"crc\" DevicePath \"\"" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.245995 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99fc4148-24e0-4938-8999-f8c369a289c0" (UID: "99fc4148-24e0-4938-8999-f8c369a289c0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.340122 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99fc4148-24e0-4938-8999-f8c369a289c0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.470821 4711 generic.go:334] "Generic (PLEG): container finished" podID="99fc4148-24e0-4938-8999-f8c369a289c0" containerID="4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4" exitCode=0 Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.470873 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwl57" event={"ID":"99fc4148-24e0-4938-8999-f8c369a289c0","Type":"ContainerDied","Data":"4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4"} Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.470932 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwl57" event={"ID":"99fc4148-24e0-4938-8999-f8c369a289c0","Type":"ContainerDied","Data":"264acc748608b9bfbfc6e623b197c9ea4cded45ae13a720151ef5c0e765a9b37"} Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.470950 4711 scope.go:117] "RemoveContainer" containerID="4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.471510 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pwl57" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.499337 4711 scope.go:117] "RemoveContainer" containerID="adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.503529 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pwl57"] Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.511812 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pwl57"] Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.537839 4711 scope.go:117] "RemoveContainer" containerID="02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.576232 4711 scope.go:117] "RemoveContainer" containerID="4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4" Dec 05 13:38:40 crc kubenswrapper[4711]: E1205 13:38:40.576673 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4\": container with ID starting with 4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4 not found: ID does not exist" containerID="4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.576711 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4"} err="failed to get container status \"4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4\": rpc error: code = NotFound desc = could not find container \"4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4\": container with ID starting with 4c618c5abb4d212814f2ed68fd45eac0a9fe71a558a72d6a5a091967906300f4 not found: ID does not exist" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.576736 4711 scope.go:117] "RemoveContainer" containerID="adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec" Dec 05 13:38:40 crc kubenswrapper[4711]: E1205 13:38:40.576987 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec\": container with ID starting with adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec not found: ID does not exist" containerID="adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.577018 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec"} err="failed to get container status \"adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec\": rpc error: code = NotFound desc = could not find container \"adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec\": container with ID starting with adab9f1ff962acfe15b593dce8caab0f42acae7d0732da0ea6ec94909a794fec not found: ID does not exist" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.577037 4711 scope.go:117] "RemoveContainer" containerID="02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59" Dec 05 13:38:40 crc kubenswrapper[4711]: E1205 13:38:40.577534 4711 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59\": container with ID starting with 02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59 not found: ID does not exist" containerID="02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.577563 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59"} err="failed to get container status \"02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59\": rpc error: code = NotFound desc = could not find container \"02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59\": container with ID starting with 02613f1306785b17aa72f0eb06e67d009336119bf2989bdfb754b23c2c7e4f59 not found: ID does not exist" Dec 05 13:38:40 crc kubenswrapper[4711]: I1205 13:38:40.694741 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" path="/var/lib/kubelet/pods/99fc4148-24e0-4938-8999-f8c369a289c0/volumes" Dec 05 13:40:18 crc kubenswrapper[4711]: I1205 13:40:18.301939 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:40:18 crc kubenswrapper[4711]: I1205 13:40:18.302357 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:40:48 crc kubenswrapper[4711]: I1205 13:40:48.300736 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:40:48 crc kubenswrapper[4711]: I1205 13:40:48.301319 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:41:18 crc kubenswrapper[4711]: I1205 13:41:18.301011 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:41:18 crc kubenswrapper[4711]: I1205 13:41:18.301746 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:41:18 crc kubenswrapper[4711]: I1205 13:41:18.301805 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 13:41:18 crc kubenswrapper[4711]: I1205 13:41:18.302781 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c1d5964e14b33e363076a70032ee756ffeb328e15ae767ff960de7b1596fcbb"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:41:18 crc kubenswrapper[4711]: I1205 13:41:18.302868 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://7c1d5964e14b33e363076a70032ee756ffeb328e15ae767ff960de7b1596fcbb" gracePeriod=600 Dec 05 13:41:19 crc kubenswrapper[4711]: I1205 13:41:19.376160 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="7c1d5964e14b33e363076a70032ee756ffeb328e15ae767ff960de7b1596fcbb" exitCode=0 Dec 05 13:41:19 crc kubenswrapper[4711]: I1205 13:41:19.376243 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"7c1d5964e14b33e363076a70032ee756ffeb328e15ae767ff960de7b1596fcbb"} Dec 05 13:41:19 crc kubenswrapper[4711]: I1205 13:41:19.376785 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058"} Dec 05 13:41:19 crc kubenswrapper[4711]: I1205 13:41:19.376803 4711 scope.go:117] "RemoveContainer" containerID="87f9c94ef8b83bc234283cb6022d599a8cbc7fa12d093007625720ec1cbb7b28" Dec 05 13:43:18 crc kubenswrapper[4711]: I1205 13:43:18.300871 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:43:18 crc kubenswrapper[4711]: I1205 13:43:18.301456 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:43:48 crc kubenswrapper[4711]: I1205 13:43:48.300835 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:43:48 crc kubenswrapper[4711]: I1205 13:43:48.301796 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:43:56 crc 
kubenswrapper[4711]: I1205 13:43:56.424579 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jw5wp"] Dec 05 13:43:56 crc kubenswrapper[4711]: E1205 13:43:56.425662 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="registry-server" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.425679 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="registry-server" Dec 05 13:43:56 crc kubenswrapper[4711]: E1205 13:43:56.425701 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="extract-content" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.425708 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="extract-content" Dec 05 13:43:56 crc kubenswrapper[4711]: E1205 13:43:56.425773 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="extract-utilities" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.425782 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="extract-utilities" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.426022 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="99fc4148-24e0-4938-8999-f8c369a289c0" containerName="registry-server" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.427788 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.438284 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jw5wp"] Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.529760 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-utilities\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.529921 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-catalog-content\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.529972 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-869wj\" (UniqueName: \"kubernetes.io/projected/132ff989-12e2-4b5c-919b-8dd045bcd734-kube-api-access-869wj\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.631736 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-utilities\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 
13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.631886 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-catalog-content\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.631927 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-869wj\" (UniqueName: \"kubernetes.io/projected/132ff989-12e2-4b5c-919b-8dd045bcd734-kube-api-access-869wj\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.632482 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-catalog-content\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.632739 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-utilities\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.651250 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-869wj\" (UniqueName: \"kubernetes.io/projected/132ff989-12e2-4b5c-919b-8dd045bcd734-kube-api-access-869wj\") pod \"community-operators-jw5wp\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:56 crc kubenswrapper[4711]: I1205 13:43:56.755147 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:43:57 crc kubenswrapper[4711]: I1205 13:43:57.333235 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jw5wp"] Dec 05 13:43:57 crc kubenswrapper[4711]: W1205 13:43:57.341985 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod132ff989_12e2_4b5c_919b_8dd045bcd734.slice/crio-47c64917e715467210a40f24b86f0444442180b44a4abd67172433145fbacb32 WatchSource:0}: Error finding container 47c64917e715467210a40f24b86f0444442180b44a4abd67172433145fbacb32: Status 404 returned error can't find the container with id 47c64917e715467210a40f24b86f0444442180b44a4abd67172433145fbacb32 Dec 05 13:43:58 crc kubenswrapper[4711]: I1205 13:43:58.011348 4711 generic.go:334] "Generic (PLEG): container finished" podID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerID="a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57" exitCode=0 Dec 05 13:43:58 crc kubenswrapper[4711]: I1205 13:43:58.011411 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jw5wp" event={"ID":"132ff989-12e2-4b5c-919b-8dd045bcd734","Type":"ContainerDied","Data":"a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57"} Dec 05 13:43:58 crc kubenswrapper[4711]: I1205 13:43:58.011437 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jw5wp" event={"ID":"132ff989-12e2-4b5c-919b-8dd045bcd734","Type":"ContainerStarted","Data":"47c64917e715467210a40f24b86f0444442180b44a4abd67172433145fbacb32"} Dec 05 13:43:58 crc kubenswrapper[4711]: I1205 13:43:58.013008 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:43:59 crc kubenswrapper[4711]: I1205 13:43:59.025791 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jw5wp" event={"ID":"132ff989-12e2-4b5c-919b-8dd045bcd734","Type":"ContainerStarted","Data":"0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c"} Dec 05 13:44:00 crc kubenswrapper[4711]: I1205 13:44:00.040269 4711 generic.go:334] "Generic (PLEG): container finished" podID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerID="0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c" exitCode=0 Dec 05 13:44:00 crc kubenswrapper[4711]: I1205 13:44:00.040365 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jw5wp" event={"ID":"132ff989-12e2-4b5c-919b-8dd045bcd734","Type":"ContainerDied","Data":"0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c"} Dec 05 13:44:01 crc kubenswrapper[4711]: I1205 13:44:01.057613 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jw5wp" event={"ID":"132ff989-12e2-4b5c-919b-8dd045bcd734","Type":"ContainerStarted","Data":"1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92"} Dec 05 13:44:01 crc kubenswrapper[4711]: I1205 13:44:01.084469 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jw5wp" podStartSLOduration=2.655695335 podStartE2EDuration="5.084453813s" podCreationTimestamp="2025-12-05 13:43:56 +0000 UTC" firstStartedPulling="2025-12-05 13:43:58.012704839 +0000 UTC m=+5683.597027179" lastFinishedPulling="2025-12-05 13:44:00.441463327 +0000 UTC m=+5686.025785657" 
observedRunningTime="2025-12-05 13:44:01.082366853 +0000 UTC m=+5686.666689183" watchObservedRunningTime="2025-12-05 13:44:01.084453813 +0000 UTC m=+5686.668776143" Dec 05 13:44:06 crc kubenswrapper[4711]: I1205 13:44:06.755955 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:44:06 crc kubenswrapper[4711]: I1205 13:44:06.756606 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:44:06 crc kubenswrapper[4711]: I1205 13:44:06.816165 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:44:07 crc kubenswrapper[4711]: I1205 13:44:07.181962 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:44:07 crc kubenswrapper[4711]: I1205 13:44:07.241145 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jw5wp"] Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.144604 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jw5wp" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerName="registry-server" containerID="cri-o://1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92" gracePeriod=2 Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.638271 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.716020 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-869wj\" (UniqueName: \"kubernetes.io/projected/132ff989-12e2-4b5c-919b-8dd045bcd734-kube-api-access-869wj\") pod \"132ff989-12e2-4b5c-919b-8dd045bcd734\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.716147 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-utilities\") pod \"132ff989-12e2-4b5c-919b-8dd045bcd734\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.716182 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-catalog-content\") pod \"132ff989-12e2-4b5c-919b-8dd045bcd734\" (UID: \"132ff989-12e2-4b5c-919b-8dd045bcd734\") " Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.721376 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-utilities" (OuterVolumeSpecName: "utilities") pod "132ff989-12e2-4b5c-919b-8dd045bcd734" (UID: "132ff989-12e2-4b5c-919b-8dd045bcd734"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.726986 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/132ff989-12e2-4b5c-919b-8dd045bcd734-kube-api-access-869wj" (OuterVolumeSpecName: "kube-api-access-869wj") pod "132ff989-12e2-4b5c-919b-8dd045bcd734" (UID: "132ff989-12e2-4b5c-919b-8dd045bcd734"). 
InnerVolumeSpecName "kube-api-access-869wj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.772546 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "132ff989-12e2-4b5c-919b-8dd045bcd734" (UID: "132ff989-12e2-4b5c-919b-8dd045bcd734"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.819545 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-869wj\" (UniqueName: \"kubernetes.io/projected/132ff989-12e2-4b5c-919b-8dd045bcd734-kube-api-access-869wj\") on node \"crc\" DevicePath \"\"" Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.819579 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:44:09 crc kubenswrapper[4711]: I1205 13:44:09.819588 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132ff989-12e2-4b5c-919b-8dd045bcd734-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.165688 4711 generic.go:334] "Generic (PLEG): container finished" podID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerID="1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92" exitCode=0 Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.165734 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jw5wp" event={"ID":"132ff989-12e2-4b5c-919b-8dd045bcd734","Type":"ContainerDied","Data":"1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92"} Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.165761 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jw5wp" event={"ID":"132ff989-12e2-4b5c-919b-8dd045bcd734","Type":"ContainerDied","Data":"47c64917e715467210a40f24b86f0444442180b44a4abd67172433145fbacb32"} Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.165789 4711 scope.go:117] "RemoveContainer" containerID="1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.165829 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jw5wp" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.197123 4711 scope.go:117] "RemoveContainer" containerID="0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.203404 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jw5wp"] Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.219179 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jw5wp"] Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.222674 4711 scope.go:117] "RemoveContainer" containerID="a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.276374 4711 scope.go:117] "RemoveContainer" containerID="1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92" Dec 05 13:44:10 crc kubenswrapper[4711]: E1205 13:44:10.277725 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92\": container with ID starting with 1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92 not found: ID does not exist" containerID="1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.277761 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92"} err="failed to get container status \"1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92\": rpc error: code = NotFound desc = could not find container \"1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92\": container with ID starting with 1891d54e29277d52cc6930c3a21c6f17bda472bb149ddd568a3b19e043279e92 not found: ID does not exist" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.277784 4711 scope.go:117] "RemoveContainer" containerID="0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c" Dec 05 13:44:10 crc kubenswrapper[4711]: E1205 13:44:10.278850 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c\": container with ID starting with 0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c not found: ID does not exist" containerID="0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.278888 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c"} err="failed to get container status \"0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c\": rpc error: code = NotFound desc = could not find container \"0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c\": container with ID starting with 0af9174ad48cbd8d7488a5db42ba397015b159ca16dda781f71ebde07a42e36c not found: ID does not exist" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.278914 4711 scope.go:117] "RemoveContainer" containerID="a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57" Dec 05 13:44:10 crc kubenswrapper[4711]: E1205 13:44:10.279234 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57\": container with ID starting with a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57 not found: ID does not exist" containerID="a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.279294 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57"} err="failed to get container status \"a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57\": rpc error: code = NotFound desc = could not find container \"a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57\": container with ID starting with a7c9b8033dbad85f8ba5fdde198e14f391d5893d7eb7b3cc6c3897398d3d8e57 not found: ID does not exist" Dec 05 13:44:10 crc kubenswrapper[4711]: I1205 13:44:10.695339 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" path="/var/lib/kubelet/pods/132ff989-12e2-4b5c-919b-8dd045bcd734/volumes" Dec 05 13:44:18 crc kubenswrapper[4711]: I1205 13:44:18.301361 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:44:18 crc kubenswrapper[4711]: I1205 13:44:18.302110 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:44:18 crc kubenswrapper[4711]: I1205 13:44:18.302173 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 13:44:18 crc kubenswrapper[4711]: I1205 13:44:18.303049 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:44:18 crc kubenswrapper[4711]: I1205 13:44:18.303107 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" gracePeriod=600 Dec 05 13:44:18 crc kubenswrapper[4711]: E1205 13:44:18.435508 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:44:19 crc kubenswrapper[4711]: I1205 13:44:19.252016 4711 generic.go:334] 
"Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" exitCode=0 Dec 05 13:44:19 crc kubenswrapper[4711]: I1205 13:44:19.252115 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058"} Dec 05 13:44:19 crc kubenswrapper[4711]: I1205 13:44:19.252294 4711 scope.go:117] "RemoveContainer" containerID="7c1d5964e14b33e363076a70032ee756ffeb328e15ae767ff960de7b1596fcbb" Dec 05 13:44:19 crc kubenswrapper[4711]: I1205 13:44:19.253020 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:44:19 crc kubenswrapper[4711]: E1205 13:44:19.253345 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:44:31 crc kubenswrapper[4711]: I1205 13:44:31.684097 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:44:31 crc kubenswrapper[4711]: E1205 13:44:31.684933 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:44:42 crc kubenswrapper[4711]: I1205 13:44:42.683575 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:44:42 crc kubenswrapper[4711]: E1205 13:44:42.684666 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:44:53 crc kubenswrapper[4711]: I1205 13:44:53.683583 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:44:53 crc kubenswrapper[4711]: E1205 13:44:53.685483 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.162505 4711 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc"] Dec 05 13:45:00 crc kubenswrapper[4711]: E1205 13:45:00.163683 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerName="extract-content" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.163706 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerName="extract-content" Dec 05 13:45:00 crc kubenswrapper[4711]: E1205 13:45:00.163730 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerName="registry-server" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.163737 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerName="registry-server" Dec 05 13:45:00 crc kubenswrapper[4711]: E1205 13:45:00.163769 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerName="extract-utilities" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.163777 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerName="extract-utilities" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.164021 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="132ff989-12e2-4b5c-919b-8dd045bcd734" containerName="registry-server" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.165001 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.169951 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.170264 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.180942 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc"] Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.272042 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7185d66-1c66-4af2-82bb-7aee90361274-secret-volume\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.272130 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdlpb\" (UniqueName: \"kubernetes.io/projected/d7185d66-1c66-4af2-82bb-7aee90361274-kube-api-access-xdlpb\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.272180 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7185d66-1c66-4af2-82bb-7aee90361274-config-volume\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.374132 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7185d66-1c66-4af2-82bb-7aee90361274-secret-volume\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.374207 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdlpb\" (UniqueName: \"kubernetes.io/projected/d7185d66-1c66-4af2-82bb-7aee90361274-kube-api-access-xdlpb\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.374249 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7185d66-1c66-4af2-82bb-7aee90361274-config-volume\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.375234 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7185d66-1c66-4af2-82bb-7aee90361274-config-volume\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.379938 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7185d66-1c66-4af2-82bb-7aee90361274-secret-volume\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.401398 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdlpb\" (UniqueName: \"kubernetes.io/projected/d7185d66-1c66-4af2-82bb-7aee90361274-kube-api-access-xdlpb\") pod \"collect-profiles-29415705-2sqcc\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.500150 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:00 crc kubenswrapper[4711]: I1205 13:45:00.978341 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc"] Dec 05 13:45:01 crc kubenswrapper[4711]: I1205 13:45:01.690557 4711 generic.go:334] "Generic (PLEG): container finished" podID="d7185d66-1c66-4af2-82bb-7aee90361274" containerID="e2c0fbe4873334a33c45930e7ab0a5c9789fb440e55aadd97eb1a5ddc0857b06" exitCode=0 Dec 05 13:45:01 crc kubenswrapper[4711]: I1205 13:45:01.690628 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" event={"ID":"d7185d66-1c66-4af2-82bb-7aee90361274","Type":"ContainerDied","Data":"e2c0fbe4873334a33c45930e7ab0a5c9789fb440e55aadd97eb1a5ddc0857b06"} Dec 05 13:45:01 crc kubenswrapper[4711]: I1205 13:45:01.690917 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" event={"ID":"d7185d66-1c66-4af2-82bb-7aee90361274","Type":"ContainerStarted","Data":"9699a8aac8437c9b0a43c781553840a6e83d7ca3230570321e3dc524639566d9"} Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.068971 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.130061 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7185d66-1c66-4af2-82bb-7aee90361274-config-volume\") pod \"d7185d66-1c66-4af2-82bb-7aee90361274\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.130114 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7185d66-1c66-4af2-82bb-7aee90361274-secret-volume\") pod \"d7185d66-1c66-4af2-82bb-7aee90361274\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.130405 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdlpb\" (UniqueName: \"kubernetes.io/projected/d7185d66-1c66-4af2-82bb-7aee90361274-kube-api-access-xdlpb\") pod \"d7185d66-1c66-4af2-82bb-7aee90361274\" (UID: \"d7185d66-1c66-4af2-82bb-7aee90361274\") " Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.130914 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7185d66-1c66-4af2-82bb-7aee90361274-config-volume" (OuterVolumeSpecName: "config-volume") pod "d7185d66-1c66-4af2-82bb-7aee90361274" (UID: "d7185d66-1c66-4af2-82bb-7aee90361274"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.131039 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7185d66-1c66-4af2-82bb-7aee90361274-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.138452 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7185d66-1c66-4af2-82bb-7aee90361274-kube-api-access-xdlpb" (OuterVolumeSpecName: "kube-api-access-xdlpb") pod "d7185d66-1c66-4af2-82bb-7aee90361274" (UID: "d7185d66-1c66-4af2-82bb-7aee90361274"). InnerVolumeSpecName "kube-api-access-xdlpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.139559 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7185d66-1c66-4af2-82bb-7aee90361274-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d7185d66-1c66-4af2-82bb-7aee90361274" (UID: "d7185d66-1c66-4af2-82bb-7aee90361274"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.234247 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7185d66-1c66-4af2-82bb-7aee90361274-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.234313 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdlpb\" (UniqueName: \"kubernetes.io/projected/d7185d66-1c66-4af2-82bb-7aee90361274-kube-api-access-xdlpb\") on node \"crc\" DevicePath \"\"" Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.716486 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" event={"ID":"d7185d66-1c66-4af2-82bb-7aee90361274","Type":"ContainerDied","Data":"9699a8aac8437c9b0a43c781553840a6e83d7ca3230570321e3dc524639566d9"} Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.716531 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9699a8aac8437c9b0a43c781553840a6e83d7ca3230570321e3dc524639566d9" Dec 05 13:45:03 crc kubenswrapper[4711]: I1205 13:45:03.717072 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-2sqcc" Dec 05 13:45:04 crc kubenswrapper[4711]: I1205 13:45:04.181342 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65"] Dec 05 13:45:04 crc kubenswrapper[4711]: I1205 13:45:04.205184 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-tfr65"] Dec 05 13:45:04 crc kubenswrapper[4711]: I1205 13:45:04.696410 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076" path="/var/lib/kubelet/pods/8d00bbe6-a8a7-473c-83a8-f2bf0ca0f076/volumes" Dec 05 13:45:06 crc kubenswrapper[4711]: I1205 13:45:06.682876 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:45:06 crc kubenswrapper[4711]: E1205 13:45:06.683466 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:45:18 crc kubenswrapper[4711]: I1205 13:45:18.698484 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:45:18 crc kubenswrapper[4711]: E1205 13:45:18.699957 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:45:32 crc kubenswrapper[4711]: I1205 13:45:32.290424 4711 scope.go:117] "RemoveContainer" containerID="b8485a33e4d4bff7394546690a4d5b7aa35c3343afd829abeece65a8825b5049" Dec 05 13:45:32 crc kubenswrapper[4711]: I1205 13:45:32.684040 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:45:32 crc kubenswrapper[4711]: E1205 13:45:32.684591 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:45:46 crc kubenswrapper[4711]: I1205 13:45:46.683944 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:45:46 crc kubenswrapper[4711]: E1205 13:45:46.684966 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.087470 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8nx29"] Dec 05 13:45:49 crc kubenswrapper[4711]: E1205 13:45:49.088652 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7185d66-1c66-4af2-82bb-7aee90361274" containerName="collect-profiles" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.088670 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7185d66-1c66-4af2-82bb-7aee90361274" containerName="collect-profiles" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.088934 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7185d66-1c66-4af2-82bb-7aee90361274" containerName="collect-profiles" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.092757 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.125942 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nx29"] Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.242767 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p266r\" (UniqueName: \"kubernetes.io/projected/4088208a-442d-4429-8815-9e74e2b163f2-kube-api-access-p266r\") pod \"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.242970 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-utilities\") pod \"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.243019 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-catalog-content\") pod \"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.345876 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p266r\" (UniqueName: \"kubernetes.io/projected/4088208a-442d-4429-8815-9e74e2b163f2-kube-api-access-p266r\") pod \"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.346000 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-utilities\") pod \"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.346024 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-catalog-content\") pod 
\"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.346612 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-catalog-content\") pod \"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.346673 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-utilities\") pod \"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.378956 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p266r\" (UniqueName: \"kubernetes.io/projected/4088208a-442d-4429-8815-9e74e2b163f2-kube-api-access-p266r\") pod \"redhat-marketplace-8nx29\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:49 crc kubenswrapper[4711]: I1205 13:45:49.424795 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:50 crc kubenswrapper[4711]: I1205 13:45:50.010685 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nx29"] Dec 05 13:45:50 crc kubenswrapper[4711]: W1205 13:45:50.024615 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4088208a_442d_4429_8815_9e74e2b163f2.slice/crio-2ed21696a4d51c45f88d76d4bd16fc0e9eafb80a5f5a9f7a4bdd7034b7b52a26 WatchSource:0}: Error finding container 2ed21696a4d51c45f88d76d4bd16fc0e9eafb80a5f5a9f7a4bdd7034b7b52a26: Status 404 returned error can't find the container with id 2ed21696a4d51c45f88d76d4bd16fc0e9eafb80a5f5a9f7a4bdd7034b7b52a26 Dec 05 13:45:50 crc kubenswrapper[4711]: I1205 13:45:50.207031 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nx29" event={"ID":"4088208a-442d-4429-8815-9e74e2b163f2","Type":"ContainerStarted","Data":"2ed21696a4d51c45f88d76d4bd16fc0e9eafb80a5f5a9f7a4bdd7034b7b52a26"} Dec 05 13:45:51 crc kubenswrapper[4711]: I1205 13:45:51.220541 4711 generic.go:334] "Generic (PLEG): container finished" podID="4088208a-442d-4429-8815-9e74e2b163f2" containerID="a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73" exitCode=0 Dec 05 13:45:51 crc kubenswrapper[4711]: I1205 13:45:51.220691 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nx29" event={"ID":"4088208a-442d-4429-8815-9e74e2b163f2","Type":"ContainerDied","Data":"a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73"} Dec 05 13:45:52 crc kubenswrapper[4711]: I1205 13:45:52.235434 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nx29" event={"ID":"4088208a-442d-4429-8815-9e74e2b163f2","Type":"ContainerStarted","Data":"6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0"} Dec 05 13:45:53 crc kubenswrapper[4711]: I1205 13:45:53.246275 4711 generic.go:334] "Generic (PLEG): container finished" 
podID="4088208a-442d-4429-8815-9e74e2b163f2" containerID="6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0" exitCode=0 Dec 05 13:45:53 crc kubenswrapper[4711]: I1205 13:45:53.246332 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nx29" event={"ID":"4088208a-442d-4429-8815-9e74e2b163f2","Type":"ContainerDied","Data":"6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0"} Dec 05 13:45:54 crc kubenswrapper[4711]: I1205 13:45:54.258306 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nx29" event={"ID":"4088208a-442d-4429-8815-9e74e2b163f2","Type":"ContainerStarted","Data":"7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93"} Dec 05 13:45:54 crc kubenswrapper[4711]: I1205 13:45:54.282969 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8nx29" podStartSLOduration=2.754630073 podStartE2EDuration="5.282941161s" podCreationTimestamp="2025-12-05 13:45:49 +0000 UTC" firstStartedPulling="2025-12-05 13:45:51.222704149 +0000 UTC m=+5796.807026509" lastFinishedPulling="2025-12-05 13:45:53.751015267 +0000 UTC m=+5799.335337597" observedRunningTime="2025-12-05 13:45:54.277208171 +0000 UTC m=+5799.861530511" watchObservedRunningTime="2025-12-05 13:45:54.282941161 +0000 UTC m=+5799.867263501" Dec 05 13:45:57 crc kubenswrapper[4711]: I1205 13:45:57.683750 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:45:57 crc kubenswrapper[4711]: E1205 13:45:57.684320 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:45:59 crc kubenswrapper[4711]: I1205 13:45:59.425056 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:59 crc kubenswrapper[4711]: I1205 13:45:59.425680 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:45:59 crc kubenswrapper[4711]: I1205 13:45:59.481894 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:46:00 crc kubenswrapper[4711]: I1205 13:46:00.391145 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:46:00 crc kubenswrapper[4711]: I1205 13:46:00.443142 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nx29"] Dec 05 13:46:02 crc kubenswrapper[4711]: I1205 13:46:02.340764 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8nx29" podUID="4088208a-442d-4429-8815-9e74e2b163f2" containerName="registry-server" containerID="cri-o://7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93" gracePeriod=2 Dec 05 13:46:02 crc kubenswrapper[4711]: I1205 13:46:02.841477 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:46:02 crc kubenswrapper[4711]: I1205 13:46:02.951788 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p266r\" (UniqueName: \"kubernetes.io/projected/4088208a-442d-4429-8815-9e74e2b163f2-kube-api-access-p266r\") pod \"4088208a-442d-4429-8815-9e74e2b163f2\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " Dec 05 13:46:02 crc kubenswrapper[4711]: I1205 13:46:02.952076 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-utilities\") pod \"4088208a-442d-4429-8815-9e74e2b163f2\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " Dec 05 13:46:02 crc kubenswrapper[4711]: I1205 13:46:02.952122 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-catalog-content\") pod \"4088208a-442d-4429-8815-9e74e2b163f2\" (UID: \"4088208a-442d-4429-8815-9e74e2b163f2\") " Dec 05 13:46:02 crc kubenswrapper[4711]: I1205 13:46:02.952896 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-utilities" (OuterVolumeSpecName: "utilities") pod "4088208a-442d-4429-8815-9e74e2b163f2" (UID: "4088208a-442d-4429-8815-9e74e2b163f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:46:02 crc kubenswrapper[4711]: I1205 13:46:02.963699 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4088208a-442d-4429-8815-9e74e2b163f2-kube-api-access-p266r" (OuterVolumeSpecName: "kube-api-access-p266r") pod "4088208a-442d-4429-8815-9e74e2b163f2" (UID: "4088208a-442d-4429-8815-9e74e2b163f2"). InnerVolumeSpecName "kube-api-access-p266r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:46:02 crc kubenswrapper[4711]: I1205 13:46:02.971652 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4088208a-442d-4429-8815-9e74e2b163f2" (UID: "4088208a-442d-4429-8815-9e74e2b163f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.054992 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p266r\" (UniqueName: \"kubernetes.io/projected/4088208a-442d-4429-8815-9e74e2b163f2-kube-api-access-p266r\") on node \"crc\" DevicePath \"\"" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.055256 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.055266 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4088208a-442d-4429-8815-9e74e2b163f2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.351834 4711 generic.go:334] "Generic (PLEG): container finished" podID="4088208a-442d-4429-8815-9e74e2b163f2" containerID="7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93" exitCode=0 Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.351874 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nx29" event={"ID":"4088208a-442d-4429-8815-9e74e2b163f2","Type":"ContainerDied","Data":"7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93"} Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.351904 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nx29" event={"ID":"4088208a-442d-4429-8815-9e74e2b163f2","Type":"ContainerDied","Data":"2ed21696a4d51c45f88d76d4bd16fc0e9eafb80a5f5a9f7a4bdd7034b7b52a26"} Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.351920 4711 scope.go:117] "RemoveContainer" containerID="7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.351994 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8nx29" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.387904 4711 scope.go:117] "RemoveContainer" containerID="6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.396638 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nx29"] Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.407831 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nx29"] Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.421089 4711 scope.go:117] "RemoveContainer" containerID="a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.459590 4711 scope.go:117] "RemoveContainer" containerID="7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93" Dec 05 13:46:03 crc kubenswrapper[4711]: E1205 13:46:03.460458 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93\": container with ID starting with 7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93 not found: ID does not exist" containerID="7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.460495 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93"} err="failed to get container status \"7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93\": rpc error: code = NotFound desc = could not find container \"7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93\": container with ID starting with 7225f4e8bdb7a28aa256dfd4d54d631b70009af3f67c163736381865b44c1f93 not found: ID does not exist" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.460523 4711 scope.go:117] "RemoveContainer" containerID="6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0" Dec 05 13:46:03 crc kubenswrapper[4711]: E1205 13:46:03.461534 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0\": container with ID starting with 6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0 not found: ID does not exist" containerID="6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.461566 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0"} err="failed to get container status \"6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0\": rpc error: code = NotFound desc = could not find container \"6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0\": container with ID starting with 6358b2a74bd7d90bb8b63a7542532ff3bb0b4fcdb8986ddbc5971b5bc315dff0 not found: ID does not exist" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.461586 4711 scope.go:117] "RemoveContainer" containerID="a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73" Dec 05 13:46:03 crc kubenswrapper[4711]: E1205 13:46:03.463625 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73\": container with ID starting with a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73 not found: ID does not exist" containerID="a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73" Dec 05 13:46:03 crc kubenswrapper[4711]: I1205 13:46:03.463656 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73"} err="failed to get container status \"a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73\": rpc error: code = NotFound desc = could not find container \"a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73\": container with ID starting with a262bfecb98744084eae523f418996aca70005e3e94001331ea8b9ca4976ee73 not found: ID does not exist" Dec 05 13:46:04 crc kubenswrapper[4711]: I1205 13:46:04.698915 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4088208a-442d-4429-8815-9e74e2b163f2" path="/var/lib/kubelet/pods/4088208a-442d-4429-8815-9e74e2b163f2/volumes" Dec 05 13:46:08 crc kubenswrapper[4711]: I1205 13:46:08.690021 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:46:08 crc kubenswrapper[4711]: E1205 13:46:08.691674 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:46:22 crc kubenswrapper[4711]: I1205 13:46:22.685102 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:46:22 crc kubenswrapper[4711]: E1205 13:46:22.686708 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:46:33 crc kubenswrapper[4711]: I1205 13:46:33.682930 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:46:33 crc kubenswrapper[4711]: E1205 13:46:33.683573 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:46:48 crc kubenswrapper[4711]: I1205 13:46:48.694554 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:46:48 crc kubenswrapper[4711]: E1205 13:46:48.695524 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:47:00 crc kubenswrapper[4711]: I1205 13:47:00.683812 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:47:00 crc kubenswrapper[4711]: E1205 13:47:00.684486 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:47:11 crc kubenswrapper[4711]: I1205 13:47:11.683231 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:47:11 crc kubenswrapper[4711]: E1205 13:47:11.684079 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:47:24 crc kubenswrapper[4711]: I1205 13:47:24.683586 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:47:24 crc kubenswrapper[4711]: E1205 13:47:24.684401 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:47:36 crc kubenswrapper[4711]: I1205 13:47:36.684725 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:47:36 crc kubenswrapper[4711]: E1205 13:47:36.685512 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:47:50 crc kubenswrapper[4711]: I1205 13:47:50.683128 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:47:50 crc kubenswrapper[4711]: E1205 13:47:50.683769 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:48:01 crc kubenswrapper[4711]: I1205 13:48:01.683793 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:48:01 crc kubenswrapper[4711]: E1205 13:48:01.684892 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:48:14 crc kubenswrapper[4711]: I1205 13:48:14.683642 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:48:14 crc kubenswrapper[4711]: E1205 13:48:14.684555 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:48:27 crc kubenswrapper[4711]: I1205 13:48:27.687491 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:48:27 crc kubenswrapper[4711]: E1205 13:48:27.688475 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:48:42 crc kubenswrapper[4711]: I1205 13:48:42.683466 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:48:42 crc kubenswrapper[4711]: E1205 13:48:42.684319 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:48:53 crc kubenswrapper[4711]: I1205 13:48:53.683907 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:48:53 crc kubenswrapper[4711]: E1205 13:48:53.685051 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" 
podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.670233 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7cchb"] Dec 05 13:48:59 crc kubenswrapper[4711]: E1205 13:48:59.671556 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4088208a-442d-4429-8815-9e74e2b163f2" containerName="registry-server" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.671583 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4088208a-442d-4429-8815-9e74e2b163f2" containerName="registry-server" Dec 05 13:48:59 crc kubenswrapper[4711]: E1205 13:48:59.671618 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4088208a-442d-4429-8815-9e74e2b163f2" containerName="extract-utilities" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.671630 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4088208a-442d-4429-8815-9e74e2b163f2" containerName="extract-utilities" Dec 05 13:48:59 crc kubenswrapper[4711]: E1205 13:48:59.671687 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4088208a-442d-4429-8815-9e74e2b163f2" containerName="extract-content" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.671698 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4088208a-442d-4429-8815-9e74e2b163f2" containerName="extract-content" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.672041 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4088208a-442d-4429-8815-9e74e2b163f2" containerName="registry-server" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.674701 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.721366 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7cchb"] Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.739315 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-catalog-content\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.739445 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzbqg\" (UniqueName: \"kubernetes.io/projected/016e2549-bb44-4322-808a-8c8e2c4b122a-kube-api-access-rzbqg\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.739605 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-utilities\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.841255 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-catalog-content\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " 
pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.841309 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzbqg\" (UniqueName: \"kubernetes.io/projected/016e2549-bb44-4322-808a-8c8e2c4b122a-kube-api-access-rzbqg\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.841367 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-utilities\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.842177 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-utilities\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.842361 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-catalog-content\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:48:59 crc kubenswrapper[4711]: I1205 13:48:59.866456 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzbqg\" (UniqueName: \"kubernetes.io/projected/016e2549-bb44-4322-808a-8c8e2c4b122a-kube-api-access-rzbqg\") pod \"redhat-operators-7cchb\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:49:00 crc kubenswrapper[4711]: I1205 13:49:00.003570 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:49:00 crc kubenswrapper[4711]: I1205 13:49:00.675153 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7cchb"] Dec 05 13:49:01 crc kubenswrapper[4711]: I1205 13:49:01.525044 4711 generic.go:334] "Generic (PLEG): container finished" podID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerID="d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f" exitCode=0 Dec 05 13:49:01 crc kubenswrapper[4711]: I1205 13:49:01.525559 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cchb" event={"ID":"016e2549-bb44-4322-808a-8c8e2c4b122a","Type":"ContainerDied","Data":"d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f"} Dec 05 13:49:01 crc kubenswrapper[4711]: I1205 13:49:01.525583 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cchb" event={"ID":"016e2549-bb44-4322-808a-8c8e2c4b122a","Type":"ContainerStarted","Data":"6b6fff5c7f1b39f01cc82a334a53ac355cb368e91370131fb2c6ba5a2a3beb1b"} Dec 05 13:49:01 crc kubenswrapper[4711]: I1205 13:49:01.527647 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:49:02 crc kubenswrapper[4711]: I1205 13:49:02.549994 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cchb" event={"ID":"016e2549-bb44-4322-808a-8c8e2c4b122a","Type":"ContainerStarted","Data":"c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3"} Dec 05 13:49:04 crc kubenswrapper[4711]: I1205 13:49:04.581460 4711 generic.go:334] "Generic (PLEG): container finished" podID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerID="c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3" exitCode=0 Dec 05 13:49:04 crc kubenswrapper[4711]: I1205 13:49:04.581495 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cchb" event={"ID":"016e2549-bb44-4322-808a-8c8e2c4b122a","Type":"ContainerDied","Data":"c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3"} Dec 05 13:49:05 crc kubenswrapper[4711]: I1205 13:49:05.596445 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cchb" event={"ID":"016e2549-bb44-4322-808a-8c8e2c4b122a","Type":"ContainerStarted","Data":"b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e"} Dec 05 13:49:05 crc kubenswrapper[4711]: I1205 13:49:05.616204 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7cchb" podStartSLOduration=3.158375507 podStartE2EDuration="6.616186627s" podCreationTimestamp="2025-12-05 13:48:59 +0000 UTC" firstStartedPulling="2025-12-05 13:49:01.527470855 +0000 UTC m=+5987.111793185" lastFinishedPulling="2025-12-05 13:49:04.985281975 +0000 UTC m=+5990.569604305" observedRunningTime="2025-12-05 13:49:05.61225935 +0000 UTC m=+5991.196581680" watchObservedRunningTime="2025-12-05 13:49:05.616186627 +0000 UTC m=+5991.200509107" Dec 05 13:49:05 crc kubenswrapper[4711]: I1205 13:49:05.683518 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:49:05 crc kubenswrapper[4711]: E1205 13:49:05.683796 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:49:06 crc kubenswrapper[4711]: I1205 13:49:06.606374 4711 generic.go:334] "Generic (PLEG): container finished" podID="745cbd1d-0e83-42d6-b6b4-b57638936898" containerID="757264c72003b338c898682caf7fc892bd46721c14e4c9bfe7dc4e8297114dd1" exitCode=1 Dec 05 13:49:06 crc kubenswrapper[4711]: I1205 13:49:06.606431 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"745cbd1d-0e83-42d6-b6b4-b57638936898","Type":"ContainerDied","Data":"757264c72003b338c898682caf7fc892bd46721c14e4c9bfe7dc4e8297114dd1"} Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.053126 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.243814 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config-secret\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.243890 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-workdir\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.243955 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ca-certs\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.243988 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-temporary\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.244016 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-config-data\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.244628 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "test-operator-ephemeral-temporary". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.244782 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.244821 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.244879 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfcd5\" (UniqueName: \"kubernetes.io/projected/745cbd1d-0e83-42d6-b6b4-b57638936898-kube-api-access-nfcd5\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.244996 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ssh-key\") pod \"745cbd1d-0e83-42d6-b6b4-b57638936898\" (UID: \"745cbd1d-0e83-42d6-b6b4-b57638936898\") " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.245023 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-config-data" (OuterVolumeSpecName: "config-data") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.245751 4711 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.245773 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.249415 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/745cbd1d-0e83-42d6-b6b4-b57638936898-kube-api-access-nfcd5" (OuterVolumeSpecName: "kube-api-access-nfcd5") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "kube-api-access-nfcd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.252610 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "local-storage07-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.254107 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.274372 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.298896 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.305097 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.327813 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "745cbd1d-0e83-42d6-b6b4-b57638936898" (UID: "745cbd1d-0e83-42d6-b6b4-b57638936898"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.346436 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.346475 4711 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/745cbd1d-0e83-42d6-b6b4-b57638936898-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.346487 4711 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.346499 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/745cbd1d-0e83-42d6-b6b4-b57638936898-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.346532 4711 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.346547 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfcd5\" (UniqueName: \"kubernetes.io/projected/745cbd1d-0e83-42d6-b6b4-b57638936898-kube-api-access-nfcd5\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.346557 4711 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/745cbd1d-0e83-42d6-b6b4-b57638936898-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.369091 4711 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.448876 4711 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.630702 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"745cbd1d-0e83-42d6-b6b4-b57638936898","Type":"ContainerDied","Data":"ccc4cc273363a0a17a9e60374d46fe7a066657e560fbc8b8052b5fc4899d1d70"} Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.631105 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccc4cc273363a0a17a9e60374d46fe7a066657e560fbc8b8052b5fc4899d1d70" Dec 05 13:49:08 crc kubenswrapper[4711]: I1205 13:49:08.631000 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 13:49:10 crc kubenswrapper[4711]: I1205 13:49:10.004189 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:49:10 crc kubenswrapper[4711]: I1205 13:49:10.004774 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:49:11 crc kubenswrapper[4711]: I1205 13:49:11.051111 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7cchb" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="registry-server" probeResult="failure" output=< Dec 05 13:49:11 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 13:49:11 crc kubenswrapper[4711]: > Dec 05 13:49:20 crc kubenswrapper[4711]: I1205 13:49:20.055058 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:49:20 crc kubenswrapper[4711]: I1205 13:49:20.114412 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:49:20 crc kubenswrapper[4711]: I1205 13:49:20.292575 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7cchb"] Dec 05 13:49:20 crc kubenswrapper[4711]: I1205 13:49:20.683465 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.342030 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 13:49:21 crc kubenswrapper[4711]: E1205 13:49:21.343335 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="745cbd1d-0e83-42d6-b6b4-b57638936898" containerName="tempest-tests-tempest-tests-runner" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.343366 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="745cbd1d-0e83-42d6-b6b4-b57638936898" containerName="tempest-tests-tempest-tests-runner" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.343910 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="745cbd1d-0e83-42d6-b6b4-b57638936898" containerName="tempest-tests-tempest-tests-runner" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.345265 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.351122 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-8fbf8" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.359424 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.529773 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdbh5\" (UniqueName: \"kubernetes.io/projected/5bcd6daa-830a-4c2b-b4e7-b45d3719ab64-kube-api-access-kdbh5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.529992 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.632411 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.632557 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdbh5\" (UniqueName: \"kubernetes.io/projected/5bcd6daa-830a-4c2b-b4e7-b45d3719ab64-kube-api-access-kdbh5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.632855 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.656536 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdbh5\" (UniqueName: \"kubernetes.io/projected/5bcd6daa-830a-4c2b-b4e7-b45d3719ab64-kube-api-access-kdbh5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.676650 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc 
kubenswrapper[4711]: I1205 13:49:21.687122 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.782720 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7cchb" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="registry-server" containerID="cri-o://b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e" gracePeriod=2 Dec 05 13:49:21 crc kubenswrapper[4711]: I1205 13:49:21.782892 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"1244bad30887d8c80c8f4234338b30be7407f0295c05bb01ca7c173b5ecec36d"} Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.175077 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 13:49:22 crc kubenswrapper[4711]: W1205 13:49:22.176831 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bcd6daa_830a_4c2b_b4e7_b45d3719ab64.slice/crio-1d8d65337a295b078af9e4d2775cd1634caa630808ffafd0d144bfa4b6f06f8c WatchSource:0}: Error finding container 1d8d65337a295b078af9e4d2775cd1634caa630808ffafd0d144bfa4b6f06f8c: Status 404 returned error can't find the container with id 1d8d65337a295b078af9e4d2775cd1634caa630808ffafd0d144bfa4b6f06f8c Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.289732 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.471488 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-utilities\") pod \"016e2549-bb44-4322-808a-8c8e2c4b122a\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.471593 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzbqg\" (UniqueName: \"kubernetes.io/projected/016e2549-bb44-4322-808a-8c8e2c4b122a-kube-api-access-rzbqg\") pod \"016e2549-bb44-4322-808a-8c8e2c4b122a\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.471898 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-catalog-content\") pod \"016e2549-bb44-4322-808a-8c8e2c4b122a\" (UID: \"016e2549-bb44-4322-808a-8c8e2c4b122a\") " Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.472560 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-utilities" (OuterVolumeSpecName: "utilities") pod "016e2549-bb44-4322-808a-8c8e2c4b122a" (UID: "016e2549-bb44-4322-808a-8c8e2c4b122a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.485758 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/016e2549-bb44-4322-808a-8c8e2c4b122a-kube-api-access-rzbqg" (OuterVolumeSpecName: "kube-api-access-rzbqg") pod "016e2549-bb44-4322-808a-8c8e2c4b122a" (UID: "016e2549-bb44-4322-808a-8c8e2c4b122a"). InnerVolumeSpecName "kube-api-access-rzbqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.573870 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.574163 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzbqg\" (UniqueName: \"kubernetes.io/projected/016e2549-bb44-4322-808a-8c8e2c4b122a-kube-api-access-rzbqg\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.583366 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "016e2549-bb44-4322-808a-8c8e2c4b122a" (UID: "016e2549-bb44-4322-808a-8c8e2c4b122a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.675801 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/016e2549-bb44-4322-808a-8c8e2c4b122a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.798383 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64","Type":"ContainerStarted","Data":"1d8d65337a295b078af9e4d2775cd1634caa630808ffafd0d144bfa4b6f06f8c"} Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.802287 4711 generic.go:334] "Generic (PLEG): container finished" podID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerID="b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e" exitCode=0 Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.802312 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cchb" event={"ID":"016e2549-bb44-4322-808a-8c8e2c4b122a","Type":"ContainerDied","Data":"b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e"} Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.802531 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cchb" event={"ID":"016e2549-bb44-4322-808a-8c8e2c4b122a","Type":"ContainerDied","Data":"6b6fff5c7f1b39f01cc82a334a53ac355cb368e91370131fb2c6ba5a2a3beb1b"} Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.802553 4711 scope.go:117] "RemoveContainer" containerID="b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.802622 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7cchb" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.843090 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7cchb"] Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.847120 4711 scope.go:117] "RemoveContainer" containerID="c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3" Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.868753 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7cchb"] Dec 05 13:49:22 crc kubenswrapper[4711]: I1205 13:49:22.874506 4711 scope.go:117] "RemoveContainer" containerID="d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f" Dec 05 13:49:23 crc kubenswrapper[4711]: I1205 13:49:23.288752 4711 scope.go:117] "RemoveContainer" containerID="b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e" Dec 05 13:49:23 crc kubenswrapper[4711]: E1205 13:49:23.291280 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e\": container with ID starting with b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e not found: ID does not exist" containerID="b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e" Dec 05 13:49:23 crc kubenswrapper[4711]: I1205 13:49:23.291352 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e"} err="failed to get container status \"b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e\": rpc error: code = NotFound desc = could not find container \"b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e\": container with ID starting with b3e7132e6cc06c2ff9b29b23f5cdac35125f6e050372f1e1d346117c6a61678e not found: ID does not exist" Dec 05 13:49:23 crc kubenswrapper[4711]: I1205 13:49:23.291410 4711 scope.go:117] "RemoveContainer" containerID="c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3" Dec 05 13:49:23 crc kubenswrapper[4711]: E1205 13:49:23.291991 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3\": container with ID starting with c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3 not found: ID does not exist" containerID="c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3" Dec 05 13:49:23 crc kubenswrapper[4711]: I1205 13:49:23.292079 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3"} err="failed to get container status \"c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3\": rpc error: code = NotFound desc = could not find container \"c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3\": container with ID starting with c83288ad0ba3fa8dcca98c65152a69faa0de03594140bf0d10dd30b5500c9fb3 not found: ID does not exist" Dec 05 13:49:23 crc kubenswrapper[4711]: I1205 13:49:23.292133 4711 scope.go:117] "RemoveContainer" containerID="d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f" Dec 05 13:49:23 crc kubenswrapper[4711]: E1205 13:49:23.292592 4711 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f\": container with ID starting with d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f not found: ID does not exist" containerID="d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f" Dec 05 13:49:23 crc kubenswrapper[4711]: I1205 13:49:23.292634 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f"} err="failed to get container status \"d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f\": rpc error: code = NotFound desc = could not find container \"d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f\": container with ID starting with d431fa489667f80088a3de9916ee7669ed026a205f23e2cd7702109613da5e0f not found: ID does not exist" Dec 05 13:49:23 crc kubenswrapper[4711]: I1205 13:49:23.817656 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"5bcd6daa-830a-4c2b-b4e7-b45d3719ab64","Type":"ContainerStarted","Data":"5bda6d52fe27913f5fb1a4736d1fec2d24c8996fdfd58633b751dca6ec3e93a4"} Dec 05 13:49:23 crc kubenswrapper[4711]: I1205 13:49:23.841927 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.6905864560000001 podStartE2EDuration="2.841902048s" podCreationTimestamp="2025-12-05 13:49:21 +0000 UTC" firstStartedPulling="2025-12-05 13:49:22.179946114 +0000 UTC m=+6007.764268444" lastFinishedPulling="2025-12-05 13:49:23.331261706 +0000 UTC m=+6008.915584036" observedRunningTime="2025-12-05 13:49:23.839272764 +0000 UTC m=+6009.423595094" watchObservedRunningTime="2025-12-05 13:49:23.841902048 +0000 UTC m=+6009.426224378" Dec 05 13:49:24 crc kubenswrapper[4711]: I1205 13:49:24.697797 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" path="/var/lib/kubelet/pods/016e2549-bb44-4322-808a-8c8e2c4b122a/volumes" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.740175 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4m758/must-gather-znh4x"] Dec 05 13:50:05 crc kubenswrapper[4711]: E1205 13:50:05.741615 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="extract-content" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.741632 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="extract-content" Dec 05 13:50:05 crc kubenswrapper[4711]: E1205 13:50:05.741679 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="extract-utilities" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.741686 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="extract-utilities" Dec 05 13:50:05 crc kubenswrapper[4711]: E1205 13:50:05.741695 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="registry-server" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.741702 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="registry-server" Dec 05 
13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.742253 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="016e2549-bb44-4322-808a-8c8e2c4b122a" containerName="registry-server" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.744099 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/must-gather-znh4x" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.747091 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4m758"/"kube-root-ca.crt" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.747273 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4m758"/"openshift-service-ca.crt" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.747498 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-4m758"/"default-dockercfg-4q94m" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.756562 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4m758/must-gather-znh4x"] Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.820321 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znkft\" (UniqueName: \"kubernetes.io/projected/14620ce2-6538-49f6-8477-ed3b2c5076bb-kube-api-access-znkft\") pod \"must-gather-znh4x\" (UID: \"14620ce2-6538-49f6-8477-ed3b2c5076bb\") " pod="openshift-must-gather-4m758/must-gather-znh4x" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.820618 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/14620ce2-6538-49f6-8477-ed3b2c5076bb-must-gather-output\") pod \"must-gather-znh4x\" (UID: \"14620ce2-6538-49f6-8477-ed3b2c5076bb\") " pod="openshift-must-gather-4m758/must-gather-znh4x" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.922300 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/14620ce2-6538-49f6-8477-ed3b2c5076bb-must-gather-output\") pod \"must-gather-znh4x\" (UID: \"14620ce2-6538-49f6-8477-ed3b2c5076bb\") " pod="openshift-must-gather-4m758/must-gather-znh4x" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.922457 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znkft\" (UniqueName: \"kubernetes.io/projected/14620ce2-6538-49f6-8477-ed3b2c5076bb-kube-api-access-znkft\") pod \"must-gather-znh4x\" (UID: \"14620ce2-6538-49f6-8477-ed3b2c5076bb\") " pod="openshift-must-gather-4m758/must-gather-znh4x" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.922914 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/14620ce2-6538-49f6-8477-ed3b2c5076bb-must-gather-output\") pod \"must-gather-znh4x\" (UID: \"14620ce2-6538-49f6-8477-ed3b2c5076bb\") " pod="openshift-must-gather-4m758/must-gather-znh4x" Dec 05 13:50:05 crc kubenswrapper[4711]: I1205 13:50:05.948059 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znkft\" (UniqueName: \"kubernetes.io/projected/14620ce2-6538-49f6-8477-ed3b2c5076bb-kube-api-access-znkft\") pod \"must-gather-znh4x\" (UID: \"14620ce2-6538-49f6-8477-ed3b2c5076bb\") " pod="openshift-must-gather-4m758/must-gather-znh4x" Dec 05 13:50:06 crc kubenswrapper[4711]: 
I1205 13:50:06.093311 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/must-gather-znh4x" Dec 05 13:50:06 crc kubenswrapper[4711]: I1205 13:50:06.549617 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4m758/must-gather-znh4x"] Dec 05 13:50:07 crc kubenswrapper[4711]: I1205 13:50:07.241766 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/must-gather-znh4x" event={"ID":"14620ce2-6538-49f6-8477-ed3b2c5076bb","Type":"ContainerStarted","Data":"9c40133028bb4ddf9d0d321ea9c652dbafb6d9bfccb1436c605204f3dd2fe408"} Dec 05 13:50:14 crc kubenswrapper[4711]: I1205 13:50:14.336400 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/must-gather-znh4x" event={"ID":"14620ce2-6538-49f6-8477-ed3b2c5076bb","Type":"ContainerStarted","Data":"a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd"} Dec 05 13:50:14 crc kubenswrapper[4711]: I1205 13:50:14.336831 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/must-gather-znh4x" event={"ID":"14620ce2-6538-49f6-8477-ed3b2c5076bb","Type":"ContainerStarted","Data":"d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9"} Dec 05 13:50:14 crc kubenswrapper[4711]: I1205 13:50:14.363906 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4m758/must-gather-znh4x" podStartSLOduration=2.671000888 podStartE2EDuration="9.363888378s" podCreationTimestamp="2025-12-05 13:50:05 +0000 UTC" firstStartedPulling="2025-12-05 13:50:06.556243641 +0000 UTC m=+6052.140565971" lastFinishedPulling="2025-12-05 13:50:13.249131131 +0000 UTC m=+6058.833453461" observedRunningTime="2025-12-05 13:50:14.358730951 +0000 UTC m=+6059.943053281" watchObservedRunningTime="2025-12-05 13:50:14.363888378 +0000 UTC m=+6059.948210698" Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.446839 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4m758/crc-debug-5rnzf"] Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.448683 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-5rnzf" Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.575863 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7d1b9c9c-7e08-4154-afd5-30a31a84a980-host\") pod \"crc-debug-5rnzf\" (UID: \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\") " pod="openshift-must-gather-4m758/crc-debug-5rnzf" Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.575985 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dllr9\" (UniqueName: \"kubernetes.io/projected/7d1b9c9c-7e08-4154-afd5-30a31a84a980-kube-api-access-dllr9\") pod \"crc-debug-5rnzf\" (UID: \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\") " pod="openshift-must-gather-4m758/crc-debug-5rnzf" Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.677438 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dllr9\" (UniqueName: \"kubernetes.io/projected/7d1b9c9c-7e08-4154-afd5-30a31a84a980-kube-api-access-dllr9\") pod \"crc-debug-5rnzf\" (UID: \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\") " pod="openshift-must-gather-4m758/crc-debug-5rnzf" Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.677588 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7d1b9c9c-7e08-4154-afd5-30a31a84a980-host\") pod \"crc-debug-5rnzf\" (UID: \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\") " pod="openshift-must-gather-4m758/crc-debug-5rnzf" Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.677734 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7d1b9c9c-7e08-4154-afd5-30a31a84a980-host\") pod \"crc-debug-5rnzf\" (UID: \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\") " pod="openshift-must-gather-4m758/crc-debug-5rnzf" Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.699874 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dllr9\" (UniqueName: \"kubernetes.io/projected/7d1b9c9c-7e08-4154-afd5-30a31a84a980-kube-api-access-dllr9\") pod \"crc-debug-5rnzf\" (UID: \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\") " pod="openshift-must-gather-4m758/crc-debug-5rnzf" Dec 05 13:50:17 crc kubenswrapper[4711]: I1205 13:50:17.767172 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-5rnzf" Dec 05 13:50:17 crc kubenswrapper[4711]: W1205 13:50:17.799366 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d1b9c9c_7e08_4154_afd5_30a31a84a980.slice/crio-f9207d8d28247caf1498cbf7391cf63b13924200cea12c9a6fd526a1a17ca472 WatchSource:0}: Error finding container f9207d8d28247caf1498cbf7391cf63b13924200cea12c9a6fd526a1a17ca472: Status 404 returned error can't find the container with id f9207d8d28247caf1498cbf7391cf63b13924200cea12c9a6fd526a1a17ca472 Dec 05 13:50:18 crc kubenswrapper[4711]: I1205 13:50:18.376475 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/crc-debug-5rnzf" event={"ID":"7d1b9c9c-7e08-4154-afd5-30a31a84a980","Type":"ContainerStarted","Data":"f9207d8d28247caf1498cbf7391cf63b13924200cea12c9a6fd526a1a17ca472"} Dec 05 13:50:30 crc kubenswrapper[4711]: I1205 13:50:30.495144 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/crc-debug-5rnzf" event={"ID":"7d1b9c9c-7e08-4154-afd5-30a31a84a980","Type":"ContainerStarted","Data":"089d721efb3ed1181db871930100f38aff3d1c173e91991090bbc01ce0408282"} Dec 05 13:50:30 crc kubenswrapper[4711]: I1205 13:50:30.516563 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4m758/crc-debug-5rnzf" podStartSLOduration=1.84448669 podStartE2EDuration="13.516540036s" podCreationTimestamp="2025-12-05 13:50:17 +0000 UTC" firstStartedPulling="2025-12-05 13:50:17.801854982 +0000 UTC m=+6063.386177312" lastFinishedPulling="2025-12-05 13:50:29.473908328 +0000 UTC m=+6075.058230658" observedRunningTime="2025-12-05 13:50:30.509058773 +0000 UTC m=+6076.093381103" watchObservedRunningTime="2025-12-05 13:50:30.516540036 +0000 UTC m=+6076.100862366" Dec 05 13:50:30 crc kubenswrapper[4711]: I1205 13:50:30.989982 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-295qb"] Dec 05 13:50:30 crc kubenswrapper[4711]: I1205 13:50:30.993082 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.006522 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-295qb"] Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.052884 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-utilities\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.052993 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-catalog-content\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.053102 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nd4r\" (UniqueName: \"kubernetes.io/projected/0b8142e2-93cb-40df-a7f5-e33e71d62beb-kube-api-access-4nd4r\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.154663 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-utilities\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.154749 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-catalog-content\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.154824 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nd4r\" (UniqueName: \"kubernetes.io/projected/0b8142e2-93cb-40df-a7f5-e33e71d62beb-kube-api-access-4nd4r\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.155291 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-utilities\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.155492 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-catalog-content\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.175678 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4nd4r\" (UniqueName: \"kubernetes.io/projected/0b8142e2-93cb-40df-a7f5-e33e71d62beb-kube-api-access-4nd4r\") pod \"certified-operators-295qb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:31 crc kubenswrapper[4711]: I1205 13:50:31.341658 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:32 crc kubenswrapper[4711]: I1205 13:50:32.118040 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-295qb"] Dec 05 13:50:32 crc kubenswrapper[4711]: W1205 13:50:32.125662 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b8142e2_93cb_40df_a7f5_e33e71d62beb.slice/crio-bea9991a1ad1f6af2725758e9c1737109a3c3a814333a72e6032dc01bee039f3 WatchSource:0}: Error finding container bea9991a1ad1f6af2725758e9c1737109a3c3a814333a72e6032dc01bee039f3: Status 404 returned error can't find the container with id bea9991a1ad1f6af2725758e9c1737109a3c3a814333a72e6032dc01bee039f3 Dec 05 13:50:32 crc kubenswrapper[4711]: I1205 13:50:32.531128 4711 generic.go:334] "Generic (PLEG): container finished" podID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerID="4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7" exitCode=0 Dec 05 13:50:32 crc kubenswrapper[4711]: I1205 13:50:32.531190 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-295qb" event={"ID":"0b8142e2-93cb-40df-a7f5-e33e71d62beb","Type":"ContainerDied","Data":"4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7"} Dec 05 13:50:32 crc kubenswrapper[4711]: I1205 13:50:32.531274 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-295qb" event={"ID":"0b8142e2-93cb-40df-a7f5-e33e71d62beb","Type":"ContainerStarted","Data":"bea9991a1ad1f6af2725758e9c1737109a3c3a814333a72e6032dc01bee039f3"} Dec 05 13:50:33 crc kubenswrapper[4711]: I1205 13:50:33.542448 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-295qb" event={"ID":"0b8142e2-93cb-40df-a7f5-e33e71d62beb","Type":"ContainerStarted","Data":"f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619"} Dec 05 13:50:36 crc kubenswrapper[4711]: I1205 13:50:36.577750 4711 generic.go:334] "Generic (PLEG): container finished" podID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerID="f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619" exitCode=0 Dec 05 13:50:36 crc kubenswrapper[4711]: I1205 13:50:36.577852 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-295qb" event={"ID":"0b8142e2-93cb-40df-a7f5-e33e71d62beb","Type":"ContainerDied","Data":"f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619"} Dec 05 13:50:49 crc kubenswrapper[4711]: I1205 13:50:49.716952 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-295qb" event={"ID":"0b8142e2-93cb-40df-a7f5-e33e71d62beb","Type":"ContainerStarted","Data":"652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76"} Dec 05 13:50:49 crc kubenswrapper[4711]: I1205 13:50:49.741910 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-295qb" 
podStartSLOduration=3.435917273 podStartE2EDuration="19.74189313s" podCreationTimestamp="2025-12-05 13:50:30 +0000 UTC" firstStartedPulling="2025-12-05 13:50:32.53656594 +0000 UTC m=+6078.120888270" lastFinishedPulling="2025-12-05 13:50:48.842541797 +0000 UTC m=+6094.426864127" observedRunningTime="2025-12-05 13:50:49.73656504 +0000 UTC m=+6095.320887370" watchObservedRunningTime="2025-12-05 13:50:49.74189313 +0000 UTC m=+6095.326215460" Dec 05 13:50:51 crc kubenswrapper[4711]: I1205 13:50:51.342509 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:51 crc kubenswrapper[4711]: I1205 13:50:51.342873 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:50:52 crc kubenswrapper[4711]: I1205 13:50:52.402372 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-295qb" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="registry-server" probeResult="failure" output=< Dec 05 13:50:52 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 13:50:52 crc kubenswrapper[4711]: > Dec 05 13:51:01 crc kubenswrapper[4711]: I1205 13:51:01.394171 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:51:01 crc kubenswrapper[4711]: I1205 13:51:01.445334 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:51:02 crc kubenswrapper[4711]: I1205 13:51:02.175271 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-295qb"] Dec 05 13:51:02 crc kubenswrapper[4711]: I1205 13:51:02.860300 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-295qb" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="registry-server" containerID="cri-o://652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76" gracePeriod=2 Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.393890 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.520598 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-utilities\") pod \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.521005 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-catalog-content\") pod \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.521043 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nd4r\" (UniqueName: \"kubernetes.io/projected/0b8142e2-93cb-40df-a7f5-e33e71d62beb-kube-api-access-4nd4r\") pod \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\" (UID: \"0b8142e2-93cb-40df-a7f5-e33e71d62beb\") " Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.521646 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-utilities" (OuterVolumeSpecName: "utilities") pod "0b8142e2-93cb-40df-a7f5-e33e71d62beb" (UID: "0b8142e2-93cb-40df-a7f5-e33e71d62beb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.531585 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b8142e2-93cb-40df-a7f5-e33e71d62beb-kube-api-access-4nd4r" (OuterVolumeSpecName: "kube-api-access-4nd4r") pod "0b8142e2-93cb-40df-a7f5-e33e71d62beb" (UID: "0b8142e2-93cb-40df-a7f5-e33e71d62beb"). InnerVolumeSpecName "kube-api-access-4nd4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.573066 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b8142e2-93cb-40df-a7f5-e33e71d62beb" (UID: "0b8142e2-93cb-40df-a7f5-e33e71d62beb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.623272 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.623321 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b8142e2-93cb-40df-a7f5-e33e71d62beb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.623340 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nd4r\" (UniqueName: \"kubernetes.io/projected/0b8142e2-93cb-40df-a7f5-e33e71d62beb-kube-api-access-4nd4r\") on node \"crc\" DevicePath \"\"" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.870573 4711 generic.go:334] "Generic (PLEG): container finished" podID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerID="652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76" exitCode=0 Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.870614 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-295qb" event={"ID":"0b8142e2-93cb-40df-a7f5-e33e71d62beb","Type":"ContainerDied","Data":"652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76"} Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.870630 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-295qb" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.870664 4711 scope.go:117] "RemoveContainer" containerID="652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.870651 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-295qb" event={"ID":"0b8142e2-93cb-40df-a7f5-e33e71d62beb","Type":"ContainerDied","Data":"bea9991a1ad1f6af2725758e9c1737109a3c3a814333a72e6032dc01bee039f3"} Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.902099 4711 scope.go:117] "RemoveContainer" containerID="f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.909169 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-295qb"] Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.921822 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-295qb"] Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.929612 4711 scope.go:117] "RemoveContainer" containerID="4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.971644 4711 scope.go:117] "RemoveContainer" containerID="652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76" Dec 05 13:51:03 crc kubenswrapper[4711]: E1205 13:51:03.972187 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76\": container with ID starting with 652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76 not found: ID does not exist" containerID="652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76" Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.972251 
4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76"} err="failed to get container status \"652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76\": rpc error: code = NotFound desc = could not find container \"652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76\": container with ID starting with 652f0612aee48323f57781d36c37b53e50a80d8e346e44977bc43f823c3e3b76 not found: ID does not exist"
Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.972281 4711 scope.go:117] "RemoveContainer" containerID="f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619"
Dec 05 13:51:03 crc kubenswrapper[4711]: E1205 13:51:03.972748 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619\": container with ID starting with f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619 not found: ID does not exist" containerID="f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619"
Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.972788 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619"} err="failed to get container status \"f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619\": rpc error: code = NotFound desc = could not find container \"f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619\": container with ID starting with f0d823553836e65349df4efcea8247a74f39364edf3983384b1038a011961619 not found: ID does not exist"
Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.972802 4711 scope.go:117] "RemoveContainer" containerID="4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7"
Dec 05 13:51:03 crc kubenswrapper[4711]: E1205 13:51:03.973317 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7\": container with ID starting with 4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7 not found: ID does not exist" containerID="4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7"
Dec 05 13:51:03 crc kubenswrapper[4711]: I1205 13:51:03.973359 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7"} err="failed to get container status \"4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7\": rpc error: code = NotFound desc = could not find container \"4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7\": container with ID starting with 4429a02af0262d1c1151e79c1de04cbf769f30a620dc35d31f5a4d80e11d91e7 not found: ID does not exist"
Dec 05 13:51:04 crc kubenswrapper[4711]: I1205 13:51:04.701006 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" path="/var/lib/kubelet/pods/0b8142e2-93cb-40df-a7f5-e33e71d62beb/volumes"
Dec 05 13:51:27 crc kubenswrapper[4711]: I1205 13:51:27.091499 4711 generic.go:334] "Generic (PLEG): container finished" podID="7d1b9c9c-7e08-4154-afd5-30a31a84a980" containerID="089d721efb3ed1181db871930100f38aff3d1c173e91991090bbc01ce0408282" exitCode=0
Dec 05 13:51:27 crc kubenswrapper[4711]: I1205 13:51:27.091577 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/crc-debug-5rnzf" event={"ID":"7d1b9c9c-7e08-4154-afd5-30a31a84a980","Type":"ContainerDied","Data":"089d721efb3ed1181db871930100f38aff3d1c173e91991090bbc01ce0408282"}
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.205899 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-5rnzf"
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.239639 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4m758/crc-debug-5rnzf"]
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.250000 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4m758/crc-debug-5rnzf"]
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.355438 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7d1b9c9c-7e08-4154-afd5-30a31a84a980-host\") pod \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\" (UID: \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\") "
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.355618 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dllr9\" (UniqueName: \"kubernetes.io/projected/7d1b9c9c-7e08-4154-afd5-30a31a84a980-kube-api-access-dllr9\") pod \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\" (UID: \"7d1b9c9c-7e08-4154-afd5-30a31a84a980\") "
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.355770 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7d1b9c9c-7e08-4154-afd5-30a31a84a980-host" (OuterVolumeSpecName: "host") pod "7d1b9c9c-7e08-4154-afd5-30a31a84a980" (UID: "7d1b9c9c-7e08-4154-afd5-30a31a84a980"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.356109 4711 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7d1b9c9c-7e08-4154-afd5-30a31a84a980-host\") on node \"crc\" DevicePath \"\""
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.369237 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d1b9c9c-7e08-4154-afd5-30a31a84a980-kube-api-access-dllr9" (OuterVolumeSpecName: "kube-api-access-dllr9") pod "7d1b9c9c-7e08-4154-afd5-30a31a84a980" (UID: "7d1b9c9c-7e08-4154-afd5-30a31a84a980"). InnerVolumeSpecName "kube-api-access-dllr9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.457801 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dllr9\" (UniqueName: \"kubernetes.io/projected/7d1b9c9c-7e08-4154-afd5-30a31a84a980-kube-api-access-dllr9\") on node \"crc\" DevicePath \"\""
Dec 05 13:51:28 crc kubenswrapper[4711]: I1205 13:51:28.696442 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d1b9c9c-7e08-4154-afd5-30a31a84a980" path="/var/lib/kubelet/pods/7d1b9c9c-7e08-4154-afd5-30a31a84a980/volumes"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.110465 4711 scope.go:117] "RemoveContainer" containerID="089d721efb3ed1181db871930100f38aff3d1c173e91991090bbc01ce0408282"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.110513 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-5rnzf"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.566947 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4m758/crc-debug-qp77c"]
Dec 05 13:51:29 crc kubenswrapper[4711]: E1205 13:51:29.567732 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="extract-utilities"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.567748 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="extract-utilities"
Dec 05 13:51:29 crc kubenswrapper[4711]: E1205 13:51:29.567759 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="registry-server"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.567766 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="registry-server"
Dec 05 13:51:29 crc kubenswrapper[4711]: E1205 13:51:29.567775 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="extract-content"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.567782 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="extract-content"
Dec 05 13:51:29 crc kubenswrapper[4711]: E1205 13:51:29.567807 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d1b9c9c-7e08-4154-afd5-30a31a84a980" containerName="container-00"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.567813 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d1b9c9c-7e08-4154-afd5-30a31a84a980" containerName="container-00"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.568061 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b8142e2-93cb-40df-a7f5-e33e71d62beb" containerName="registry-server"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.568093 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d1b9c9c-7e08-4154-afd5-30a31a84a980" containerName="container-00"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.568904 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.581218 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9fn8\" (UniqueName: \"kubernetes.io/projected/6976329d-722c-4932-9a88-1debdb58d7c2-kube-api-access-s9fn8\") pod \"crc-debug-qp77c\" (UID: \"6976329d-722c-4932-9a88-1debdb58d7c2\") " pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.581597 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6976329d-722c-4932-9a88-1debdb58d7c2-host\") pod \"crc-debug-qp77c\" (UID: \"6976329d-722c-4932-9a88-1debdb58d7c2\") " pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.682401 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9fn8\" (UniqueName: \"kubernetes.io/projected/6976329d-722c-4932-9a88-1debdb58d7c2-kube-api-access-s9fn8\") pod \"crc-debug-qp77c\" (UID: \"6976329d-722c-4932-9a88-1debdb58d7c2\") " pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.682554 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6976329d-722c-4932-9a88-1debdb58d7c2-host\") pod \"crc-debug-qp77c\" (UID: \"6976329d-722c-4932-9a88-1debdb58d7c2\") " pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.682703 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6976329d-722c-4932-9a88-1debdb58d7c2-host\") pod \"crc-debug-qp77c\" (UID: \"6976329d-722c-4932-9a88-1debdb58d7c2\") " pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.707918 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9fn8\" (UniqueName: \"kubernetes.io/projected/6976329d-722c-4932-9a88-1debdb58d7c2-kube-api-access-s9fn8\") pod \"crc-debug-qp77c\" (UID: \"6976329d-722c-4932-9a88-1debdb58d7c2\") " pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:29 crc kubenswrapper[4711]: I1205 13:51:29.891183 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:30 crc kubenswrapper[4711]: I1205 13:51:30.126048 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/crc-debug-qp77c" event={"ID":"6976329d-722c-4932-9a88-1debdb58d7c2","Type":"ContainerStarted","Data":"28dc1a8a2567e42411a533ce9a23feae1a77c3fb2b2863c3090c94f2791c7496"}
Dec 05 13:51:31 crc kubenswrapper[4711]: I1205 13:51:31.136927 4711 generic.go:334] "Generic (PLEG): container finished" podID="6976329d-722c-4932-9a88-1debdb58d7c2" containerID="a7a7940643467f280bb1e07dced9cce230ca364d387122d5c55a73eca2f4a1a3" exitCode=0
Dec 05 13:51:31 crc kubenswrapper[4711]: I1205 13:51:31.136981 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/crc-debug-qp77c" event={"ID":"6976329d-722c-4932-9a88-1debdb58d7c2","Type":"ContainerDied","Data":"a7a7940643467f280bb1e07dced9cce230ca364d387122d5c55a73eca2f4a1a3"}
Dec 05 13:51:32 crc kubenswrapper[4711]: I1205 13:51:32.275320 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:32 crc kubenswrapper[4711]: I1205 13:51:32.326412 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6976329d-722c-4932-9a88-1debdb58d7c2-host\") pod \"6976329d-722c-4932-9a88-1debdb58d7c2\" (UID: \"6976329d-722c-4932-9a88-1debdb58d7c2\") "
Dec 05 13:51:32 crc kubenswrapper[4711]: I1205 13:51:32.326531 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6976329d-722c-4932-9a88-1debdb58d7c2-host" (OuterVolumeSpecName: "host") pod "6976329d-722c-4932-9a88-1debdb58d7c2" (UID: "6976329d-722c-4932-9a88-1debdb58d7c2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 13:51:32 crc kubenswrapper[4711]: I1205 13:51:32.326594 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9fn8\" (UniqueName: \"kubernetes.io/projected/6976329d-722c-4932-9a88-1debdb58d7c2-kube-api-access-s9fn8\") pod \"6976329d-722c-4932-9a88-1debdb58d7c2\" (UID: \"6976329d-722c-4932-9a88-1debdb58d7c2\") "
Dec 05 13:51:32 crc kubenswrapper[4711]: I1205 13:51:32.327538 4711 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6976329d-722c-4932-9a88-1debdb58d7c2-host\") on node \"crc\" DevicePath \"\""
Dec 05 13:51:32 crc kubenswrapper[4711]: I1205 13:51:32.333062 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6976329d-722c-4932-9a88-1debdb58d7c2-kube-api-access-s9fn8" (OuterVolumeSpecName: "kube-api-access-s9fn8") pod "6976329d-722c-4932-9a88-1debdb58d7c2" (UID: "6976329d-722c-4932-9a88-1debdb58d7c2"). InnerVolumeSpecName "kube-api-access-s9fn8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:51:32 crc kubenswrapper[4711]: I1205 13:51:32.428368 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9fn8\" (UniqueName: \"kubernetes.io/projected/6976329d-722c-4932-9a88-1debdb58d7c2-kube-api-access-s9fn8\") on node \"crc\" DevicePath \"\""
Dec 05 13:51:33 crc kubenswrapper[4711]: I1205 13:51:33.161483 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/crc-debug-qp77c" event={"ID":"6976329d-722c-4932-9a88-1debdb58d7c2","Type":"ContainerDied","Data":"28dc1a8a2567e42411a533ce9a23feae1a77c3fb2b2863c3090c94f2791c7496"}
Dec 05 13:51:33 crc kubenswrapper[4711]: I1205 13:51:33.161801 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28dc1a8a2567e42411a533ce9a23feae1a77c3fb2b2863c3090c94f2791c7496"
Dec 05 13:51:33 crc kubenswrapper[4711]: I1205 13:51:33.161559 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-qp77c"
Dec 05 13:51:33 crc kubenswrapper[4711]: I1205 13:51:33.625482 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4m758/crc-debug-qp77c"]
Dec 05 13:51:33 crc kubenswrapper[4711]: I1205 13:51:33.635220 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4m758/crc-debug-qp77c"]
Dec 05 13:51:34 crc kubenswrapper[4711]: I1205 13:51:34.695440 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6976329d-722c-4932-9a88-1debdb58d7c2" path="/var/lib/kubelet/pods/6976329d-722c-4932-9a88-1debdb58d7c2/volumes"
Dec 05 13:51:34 crc kubenswrapper[4711]: I1205 13:51:34.815139 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4m758/crc-debug-h44fw"]
Dec 05 13:51:34 crc kubenswrapper[4711]: E1205 13:51:34.815598 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6976329d-722c-4932-9a88-1debdb58d7c2" containerName="container-00"
Dec 05 13:51:34 crc kubenswrapper[4711]: I1205 13:51:34.815874 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6976329d-722c-4932-9a88-1debdb58d7c2" containerName="container-00"
Dec 05 13:51:34 crc kubenswrapper[4711]: I1205 13:51:34.816086 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6976329d-722c-4932-9a88-1debdb58d7c2" containerName="container-00"
Dec 05 13:51:34 crc kubenswrapper[4711]: I1205 13:51:34.816791 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:34 crc kubenswrapper[4711]: I1205 13:51:34.983841 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c506990-a89e-4abd-923e-e762a209a808-host\") pod \"crc-debug-h44fw\" (UID: \"4c506990-a89e-4abd-923e-e762a209a808\") " pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:34 crc kubenswrapper[4711]: I1205 13:51:34.984597 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgl99\" (UniqueName: \"kubernetes.io/projected/4c506990-a89e-4abd-923e-e762a209a808-kube-api-access-sgl99\") pod \"crc-debug-h44fw\" (UID: \"4c506990-a89e-4abd-923e-e762a209a808\") " pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:35 crc kubenswrapper[4711]: I1205 13:51:35.086902 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c506990-a89e-4abd-923e-e762a209a808-host\") pod \"crc-debug-h44fw\" (UID: \"4c506990-a89e-4abd-923e-e762a209a808\") " pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:35 crc kubenswrapper[4711]: I1205 13:51:35.087073 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c506990-a89e-4abd-923e-e762a209a808-host\") pod \"crc-debug-h44fw\" (UID: \"4c506990-a89e-4abd-923e-e762a209a808\") " pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:35 crc kubenswrapper[4711]: I1205 13:51:35.087124 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgl99\" (UniqueName: \"kubernetes.io/projected/4c506990-a89e-4abd-923e-e762a209a808-kube-api-access-sgl99\") pod \"crc-debug-h44fw\" (UID: \"4c506990-a89e-4abd-923e-e762a209a808\") " pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:35 crc kubenswrapper[4711]: I1205 13:51:35.122285 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgl99\" (UniqueName: \"kubernetes.io/projected/4c506990-a89e-4abd-923e-e762a209a808-kube-api-access-sgl99\") pod \"crc-debug-h44fw\" (UID: \"4c506990-a89e-4abd-923e-e762a209a808\") " pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:35 crc kubenswrapper[4711]: I1205 13:51:35.133461 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:35 crc kubenswrapper[4711]: I1205 13:51:35.190227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/crc-debug-h44fw" event={"ID":"4c506990-a89e-4abd-923e-e762a209a808","Type":"ContainerStarted","Data":"be33b3a3518888e686fa66ee928d17119ff2652a68626cf8e3c56497a43f3558"}
Dec 05 13:51:36 crc kubenswrapper[4711]: I1205 13:51:36.207868 4711 generic.go:334] "Generic (PLEG): container finished" podID="4c506990-a89e-4abd-923e-e762a209a808" containerID="f438fb25a78ce1bbe496abbdde7562c149010f0cca0f06903859ceb85208fdbb" exitCode=0
Dec 05 13:51:36 crc kubenswrapper[4711]: I1205 13:51:36.207976 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/crc-debug-h44fw" event={"ID":"4c506990-a89e-4abd-923e-e762a209a808","Type":"ContainerDied","Data":"f438fb25a78ce1bbe496abbdde7562c149010f0cca0f06903859ceb85208fdbb"}
Dec 05 13:51:36 crc kubenswrapper[4711]: I1205 13:51:36.255142 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4m758/crc-debug-h44fw"]
Dec 05 13:51:36 crc kubenswrapper[4711]: I1205 13:51:36.271643 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4m758/crc-debug-h44fw"]
Dec 05 13:51:37 crc kubenswrapper[4711]: I1205 13:51:37.326938 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:37 crc kubenswrapper[4711]: I1205 13:51:37.344547 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c506990-a89e-4abd-923e-e762a209a808-host\") pod \"4c506990-a89e-4abd-923e-e762a209a808\" (UID: \"4c506990-a89e-4abd-923e-e762a209a808\") "
Dec 05 13:51:37 crc kubenswrapper[4711]: I1205 13:51:37.344629 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4c506990-a89e-4abd-923e-e762a209a808-host" (OuterVolumeSpecName: "host") pod "4c506990-a89e-4abd-923e-e762a209a808" (UID: "4c506990-a89e-4abd-923e-e762a209a808"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 13:51:37 crc kubenswrapper[4711]: I1205 13:51:37.344763 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgl99\" (UniqueName: \"kubernetes.io/projected/4c506990-a89e-4abd-923e-e762a209a808-kube-api-access-sgl99\") pod \"4c506990-a89e-4abd-923e-e762a209a808\" (UID: \"4c506990-a89e-4abd-923e-e762a209a808\") "
Dec 05 13:51:37 crc kubenswrapper[4711]: I1205 13:51:37.345804 4711 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c506990-a89e-4abd-923e-e762a209a808-host\") on node \"crc\" DevicePath \"\""
Dec 05 13:51:37 crc kubenswrapper[4711]: I1205 13:51:37.350975 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c506990-a89e-4abd-923e-e762a209a808-kube-api-access-sgl99" (OuterVolumeSpecName: "kube-api-access-sgl99") pod "4c506990-a89e-4abd-923e-e762a209a808" (UID: "4c506990-a89e-4abd-923e-e762a209a808"). InnerVolumeSpecName "kube-api-access-sgl99". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:51:37 crc kubenswrapper[4711]: I1205 13:51:37.446998 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgl99\" (UniqueName: \"kubernetes.io/projected/4c506990-a89e-4abd-923e-e762a209a808-kube-api-access-sgl99\") on node \"crc\" DevicePath \"\""
Dec 05 13:51:38 crc kubenswrapper[4711]: I1205 13:51:38.230859 4711 scope.go:117] "RemoveContainer" containerID="f438fb25a78ce1bbe496abbdde7562c149010f0cca0f06903859ceb85208fdbb"
Dec 05 13:51:38 crc kubenswrapper[4711]: I1205 13:51:38.231221 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/crc-debug-h44fw"
Dec 05 13:51:38 crc kubenswrapper[4711]: I1205 13:51:38.694981 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c506990-a89e-4abd-923e-e762a209a808" path="/var/lib/kubelet/pods/4c506990-a89e-4abd-923e-e762a209a808/volumes"
Dec 05 13:51:48 crc kubenswrapper[4711]: I1205 13:51:48.300604 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:51:48 crc kubenswrapper[4711]: I1205 13:51:48.301083 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:52:01 crc kubenswrapper[4711]: I1205 13:52:01.994893 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5885f55d9b-n67kv_d605ba82-4b40-4729-a7e7-a038bab81b2b/barbican-api/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.002225 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5885f55d9b-n67kv_d605ba82-4b40-4729-a7e7-a038bab81b2b/barbican-api-log/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.183596 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5467d6b846-6zlkw_83f5120f-4476-4a96-b1d9-238db3564735/barbican-keystone-listener/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.260266 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5467d6b846-6zlkw_83f5120f-4476-4a96-b1d9-238db3564735/barbican-keystone-listener-log/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.381182 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-65744cb55-qz24w_483e0862-32c8-445a-a8b5-745f71c7cb3f/barbican-worker/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.438836 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-65744cb55-qz24w_483e0862-32c8-445a-a8b5-745f71c7cb3f/barbican-worker-log/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.529188 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-6bcs7_9ed39ddf-9274-44fa-8267-59d9c8f1447a/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.776184 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fa9b64a3-41ee-4892-92a8-4d404c1545fc/ceilometer-central-agent/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.817648 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fa9b64a3-41ee-4892-92a8-4d404c1545fc/proxy-httpd/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.827714 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fa9b64a3-41ee-4892-92a8-4d404c1545fc/ceilometer-notification-agent/0.log"
Dec 05 13:52:02 crc kubenswrapper[4711]: I1205 13:52:02.940811 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fa9b64a3-41ee-4892-92a8-4d404c1545fc/sg-core/0.log"
Dec 05 13:52:03 crc kubenswrapper[4711]: I1205 13:52:03.092286 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0bfa64cd-4859-46f6-9261-8c7f4e63ccfd/cinder-api-log/0.log"
Dec 05 13:52:03 crc kubenswrapper[4711]: I1205 13:52:03.372718 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_ad25585d-a381-43fc-8254-91ed6b58e1da/probe/0.log"
Dec 05 13:52:03 crc kubenswrapper[4711]: I1205 13:52:03.650473 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_ed1a3b33-3fb6-412a-8bde-03171358617c/cinder-scheduler/0.log"
Dec 05 13:52:03 crc kubenswrapper[4711]: I1205 13:52:03.653803 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_ad25585d-a381-43fc-8254-91ed6b58e1da/cinder-backup/0.log"
Dec 05 13:52:03 crc kubenswrapper[4711]: I1205 13:52:03.698590 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0bfa64cd-4859-46f6-9261-8c7f4e63ccfd/cinder-api/0.log"
Dec 05 13:52:03 crc kubenswrapper[4711]: I1205 13:52:03.752281 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_ed1a3b33-3fb6-412a-8bde-03171358617c/probe/0.log"
Dec 05 13:52:03 crc kubenswrapper[4711]: I1205 13:52:03.932905 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_41cbfd27-4dc3-4989-8e6d-518ebc6c95c6/probe/0.log"
Dec 05 13:52:04 crc kubenswrapper[4711]: I1205 13:52:04.050369 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_41cbfd27-4dc3-4989-8e6d-518ebc6c95c6/cinder-volume/0.log"
Dec 05 13:52:04 crc kubenswrapper[4711]: I1205 13:52:04.217193 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_73f78a38-93dc-4cf3-9d10-51ff489c6aa5/probe/0.log"
Dec 05 13:52:04 crc kubenswrapper[4711]: I1205 13:52:04.283254 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-72hkc_cc20675d-9302-43d3-8faf-74a8bbd8f752/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:04 crc kubenswrapper[4711]: I1205 13:52:04.373329 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_73f78a38-93dc-4cf3-9d10-51ff489c6aa5/cinder-volume/0.log"
Dec 05 13:52:04 crc kubenswrapper[4711]: I1205 13:52:04.529427 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-w64bv_fa005b6f-fb79-4a1f-be0e-ec72a8680996/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:04 crc kubenswrapper[4711]: I1205 13:52:04.606222 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6c85bff75-jnh4g_54f1fb07-feb6-4d6e-8264-f74beb20b77e/init/0.log"
Dec 05 13:52:04 crc kubenswrapper[4711]: I1205 13:52:04.753219 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6c85bff75-jnh4g_54f1fb07-feb6-4d6e-8264-f74beb20b77e/init/0.log"
Dec 05 13:52:05 crc kubenswrapper[4711]: I1205 13:52:05.171205 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-pt2z8_35f05c7f-bc76-4f95-8b4e-dc37a544c8e0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:05 crc kubenswrapper[4711]: I1205 13:52:05.178981 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_d707502b-314c-43db-8347-fd6db2e3280e/glance-httpd/0.log"
Dec 05 13:52:05 crc kubenswrapper[4711]: I1205 13:52:05.237247 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_d707502b-314c-43db-8347-fd6db2e3280e/glance-log/0.log"
Dec 05 13:52:05 crc kubenswrapper[4711]: I1205 13:52:05.361828 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6c85bff75-jnh4g_54f1fb07-feb6-4d6e-8264-f74beb20b77e/dnsmasq-dns/0.log"
Dec 05 13:52:05 crc kubenswrapper[4711]: I1205 13:52:05.414983 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8428680c-7087-4734-9868-462045c17653/glance-httpd/0.log"
Dec 05 13:52:05 crc kubenswrapper[4711]: I1205 13:52:05.516584 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8428680c-7087-4734-9868-462045c17653/glance-log/0.log"
Dec 05 13:52:05 crc kubenswrapper[4711]: I1205 13:52:05.877581 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6856c6c898-9lzvt_ba5de8d4-e693-4431-aee3-0ba498f62e8e/horizon/0.log"
Dec 05 13:52:06 crc kubenswrapper[4711]: I1205 13:52:06.019543 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9m645_8729bdd8-ba47-4ba5-9ff4-71ec183635e7/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:06 crc kubenswrapper[4711]: I1205 13:52:06.123519 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-phtdt_f8d11d11-1dd3-475f-8715-54ce9afc0c18/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:06 crc kubenswrapper[4711]: I1205 13:52:06.341464 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415661-qkrkm_80663af4-fba3-4f84-b9cf-687b8f08501a/keystone-cron/0.log"
Dec 05 13:52:06 crc kubenswrapper[4711]: I1205 13:52:06.514213 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_6f039111-5426-4f30-8101-1629afd4c8dc/kube-state-metrics/0.log"
Dec 05 13:52:06 crc kubenswrapper[4711]: I1205 13:52:06.660403 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-kd9g2_2edaac33-536e-4fe8-9579-236b42229841/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:06 crc kubenswrapper[4711]: I1205 13:52:06.795301 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6856c6c898-9lzvt_ba5de8d4-e693-4431-aee3-0ba498f62e8e/horizon-log/0.log"
Dec 05 13:52:06 crc kubenswrapper[4711]: I1205 13:52:06.853259 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-545d7cb86d-bpnk9_d8337ca9-f920-4f08-b321-15a6b9290a76/keystone-api/0.log"
Dec 05 13:52:07 crc kubenswrapper[4711]: I1205 13:52:07.209958 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-ldxhn_8942d3ba-8261-4115-a071-6621ab696423/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:07 crc kubenswrapper[4711]: I1205 13:52:07.376994 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b64c669fc-xv225_4fa030c6-14a5-4924-a293-cfa089e98f54/neutron-httpd/0.log"
Dec 05 13:52:07 crc kubenswrapper[4711]: I1205 13:52:07.377029 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b64c669fc-xv225_4fa030c6-14a5-4924-a293-cfa089e98f54/neutron-api/0.log"
Dec 05 13:52:07 crc kubenswrapper[4711]: I1205 13:52:07.987705 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_728a3b65-c3e7-4ebf-b8b7-b1483643e4de/nova-cell0-conductor-conductor/0.log"
Dec 05 13:52:08 crc kubenswrapper[4711]: I1205 13:52:08.246348 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_739542f3-a693-4b21-9f9c-dba893c7b3f0/nova-cell1-conductor-conductor/0.log"
Dec 05 13:52:08 crc kubenswrapper[4711]: I1205 13:52:08.624630 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_889e0ed2-a76b-42dd-901e-ff2707f8443d/nova-cell1-novncproxy-novncproxy/0.log"
Dec 05 13:52:08 crc kubenswrapper[4711]: I1205 13:52:08.817473 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-2zlb6_a90b9716-a1b0-4a0b-9fc8-6ca9358a9144/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:09 crc kubenswrapper[4711]: I1205 13:52:09.043378 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_da6eb04a-87a2-4470-b96a-97e7bbc52a4a/nova-api-log/0.log"
Dec 05 13:52:09 crc kubenswrapper[4711]: I1205 13:52:09.203284 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_abe3c847-4536-4e00-943c-14ecbb8f9600/nova-metadata-log/0.log"
Dec 05 13:52:09 crc kubenswrapper[4711]: I1205 13:52:09.548275 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_da6eb04a-87a2-4470-b96a-97e7bbc52a4a/nova-api-api/0.log"
Dec 05 13:52:09 crc kubenswrapper[4711]: I1205 13:52:09.705571 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e43a6d5b-867b-4aee-b8bd-37f5ef5d92b9/nova-scheduler-scheduler/0.log"
Dec 05 13:52:09 crc kubenswrapper[4711]: I1205 13:52:09.725631 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6768dcf2-c875-4467-9da9-3857b2fdb2e3/mysql-bootstrap/0.log"
Dec 05 13:52:09 crc kubenswrapper[4711]: I1205 13:52:09.907332 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6768dcf2-c875-4467-9da9-3857b2fdb2e3/mysql-bootstrap/0.log"
Dec 05 13:52:09 crc kubenswrapper[4711]: I1205 13:52:09.955050 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6768dcf2-c875-4467-9da9-3857b2fdb2e3/galera/0.log"
Dec 05 13:52:10 crc kubenswrapper[4711]: I1205 13:52:10.097845 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_feb938be-1a43-402f-8373-47a6c9217d9c/mysql-bootstrap/0.log"
Dec 05 13:52:10 crc kubenswrapper[4711]: I1205 13:52:10.369851 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_feb938be-1a43-402f-8373-47a6c9217d9c/mysql-bootstrap/0.log"
Dec 05 13:52:10 crc kubenswrapper[4711]: I1205 13:52:10.387685 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_feb938be-1a43-402f-8373-47a6c9217d9c/galera/0.log"
Dec 05 13:52:10 crc kubenswrapper[4711]: I1205 13:52:10.590916 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_b6bcabfd-50fc-4e19-98ff-9c4f03eb5953/openstackclient/0.log"
Dec 05 13:52:10 crc kubenswrapper[4711]: I1205 13:52:10.649067 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4wg9n_13a40f45-a612-477e-b883-94012252a457/ovn-controller/0.log"
Dec 05 13:52:10 crc kubenswrapper[4711]: I1205 13:52:10.853955 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-5b8rk_1939bf8a-af4b-40df-b6a3-390a44292cfb/openstack-network-exporter/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.077475 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-28s5p_0a1cbd77-9586-4e37-a172-cfe7ecda6c72/ovsdb-server-init/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.252114 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-28s5p_0a1cbd77-9586-4e37-a172-cfe7ecda6c72/ovsdb-server-init/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.338919 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-28s5p_0a1cbd77-9586-4e37-a172-cfe7ecda6c72/ovsdb-server/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.601770 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-mchpg_4e0f0d4c-40ae-4f75-ba7a-e68edad4c5fc/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.740653 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-28s5p_0a1cbd77-9586-4e37-a172-cfe7ecda6c72/ovs-vswitchd/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.783503 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_2330b767-24ec-4c55-9458-73ff85a96bc7/openstack-network-exporter/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.849029 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_abe3c847-4536-4e00-943c-14ecbb8f9600/nova-metadata-metadata/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.940323 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_2330b767-24ec-4c55-9458-73ff85a96bc7/ovn-northd/0.log"
Dec 05 13:52:11 crc kubenswrapper[4711]: I1205 13:52:11.991561 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_2a1605ec-ad5c-4113-ac53-b8cf93bd5063/openstack-network-exporter/0.log"
Dec 05 13:52:12 crc kubenswrapper[4711]: I1205 13:52:12.117686 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_2a1605ec-ad5c-4113-ac53-b8cf93bd5063/ovsdbserver-nb/0.log"
Dec 05 13:52:12 crc kubenswrapper[4711]: I1205 13:52:12.447622 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9d666928-c46a-4204-916c-231a43e82047/openstack-network-exporter/0.log"
Dec 05 13:52:12 crc kubenswrapper[4711]: I1205 13:52:12.594209 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9d666928-c46a-4204-916c-231a43e82047/ovsdbserver-sb/0.log"
Dec 05 13:52:12 crc kubenswrapper[4711]: I1205 13:52:12.880109 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6/init-config-reloader/0.log"
Dec 05 13:52:12 crc kubenswrapper[4711]: I1205 13:52:12.896772 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d94746d9d-m4k6w_1aa31398-4345-4fca-bbe3-4682d082c3d7/placement-api/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.046624 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d94746d9d-m4k6w_1aa31398-4345-4fca-bbe3-4682d082c3d7/placement-log/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.131809 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6/init-config-reloader/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.132752 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6/config-reloader/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.186682 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6/prometheus/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.296227 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5a1a6ef4-0684-4c2e-9200-ed5ffb27cdc6/thanos-sidecar/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.365732 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1719e95a-bfa1-4302-b7f5-08acd0d41d93/setup-container/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.648247 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1719e95a-bfa1-4302-b7f5-08acd0d41d93/rabbitmq/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.660308 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1719e95a-bfa1-4302-b7f5-08acd0d41d93/setup-container/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.703184 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_d1b49e15-30ab-4ef7-8980-436468104f7b/setup-container/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.862361 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_d1b49e15-30ab-4ef7-8980-436468104f7b/rabbitmq/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.944958 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_314912fb-0c68-4fc7-9472-f84b1f0ab8cd/setup-container/0.log"
Dec 05 13:52:13 crc kubenswrapper[4711]: I1205 13:52:13.948157 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_d1b49e15-30ab-4ef7-8980-436468104f7b/setup-container/0.log"
Dec 05 13:52:14 crc kubenswrapper[4711]: I1205 13:52:14.169612 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_314912fb-0c68-4fc7-9472-f84b1f0ab8cd/setup-container/0.log"
Dec 05 13:52:14 crc kubenswrapper[4711]: I1205 13:52:14.211555 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_314912fb-0c68-4fc7-9472-f84b1f0ab8cd/rabbitmq/0.log"
Dec 05 13:52:14 crc kubenswrapper[4711]: I1205 13:52:14.224465 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-w2l5r_8bd300af-c4e6-4641-a598-0ab3af20c754/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:14 crc kubenswrapper[4711]: I1205 13:52:14.485110 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-cs9sx_4c833386-c174-487a-8044-9eb7c8fc7cb0/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:14 crc kubenswrapper[4711]: I1205 13:52:14.504992 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-jjhp9_91e39369-6074-48c3-bb1f-4b2d8b92243e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:14 crc kubenswrapper[4711]: I1205 13:52:14.711508 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-n6cnz_6f0b9185-75d5-4e86-9c18-211ea2a8f3dc/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:14 crc kubenswrapper[4711]: I1205 13:52:14.790054 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-6kgdc_80bc6810-0d4b-430b-b96a-6606cc41d7b9/ssh-known-hosts-edpm-deployment/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.066962 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5d757fb76c-sptb4_c319c3a5-f67a-47d7-bfe3-8e874cf01471/proxy-server/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.154525 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-2jbtw_64b58c93-4cf7-4623-ac23-b01d1fb62fa4/swift-ring-rebalance/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.283070 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/account-auditor/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.310752 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5d757fb76c-sptb4_c319c3a5-f67a-47d7-bfe3-8e874cf01471/proxy-httpd/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.351534 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/account-reaper/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.511223 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/account-replicator/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.514411 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/container-auditor/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.567614 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/account-server/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.615782 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/container-replicator/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.663575 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/container-server/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.820740 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/object-expirer/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.828809 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/container-updater/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.834460 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/object-auditor/0.log"
Dec 05 13:52:15 crc kubenswrapper[4711]: I1205 13:52:15.924066 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/object-replicator/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.026672 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/object-server/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.047475 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/object-updater/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.056022 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/rsync/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.199531 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_421d5855-985f-4d7f-9faf-c868088a7291/swift-recon-cron/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.325713 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-6flsn_e5532dde-9201-42f7-bc4c-4837ad84aa24/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.349668 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_3be4e2c7-9acc-4491-a349-0bc788db0e9e/memcached/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.526412 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_5bcd6daa-830a-4c2b-b4e7-b45d3719ab64/test-operator-logs-container/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.570087 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-j82gv_e56926d2-8bf7-4142-91eb-470bd969cbd3/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 13:52:16 crc kubenswrapper[4711]: I1205 13:52:16.960602 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_745cbd1d-0e83-42d6-b6b4-b57638936898/tempest-tests-tempest-tests-runner/0.log"
Dec 05 13:52:17 crc kubenswrapper[4711]: I1205 13:52:17.447490 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_4e6cdb19-ee98-47d7-9b19-9edfb9fe3907/watcher-applier/0.log"
Dec 05 13:52:18 crc kubenswrapper[4711]: I1205 13:52:18.300353 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:52:18 crc kubenswrapper[4711]: I1205 13:52:18.300620 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:52:18 crc kubenswrapper[4711]: I1205 13:52:18.325708 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_96d9fdfa-55ca-4955-a585-b730c58b8e6f/watcher-api-log/0.log"
Dec 05 13:52:20 crc kubenswrapper[4711]: I1205 13:52:20.958041 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_560f4e5b-3ebe-4357-ad7d-d3fb4912b63f/watcher-decision-engine/0.log"
Dec 05 13:52:22 crc kubenswrapper[4711]: I1205 13:52:22.071711 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_96d9fdfa-55ca-4955-a585-b730c58b8e6f/watcher-api/0.log"
Dec 05 13:52:41 crc kubenswrapper[4711]: I1205 13:52:41.820728 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj_9aa1bdf3-cb54-4fbc-8ae3-56015757ca26/util/0.log"
Dec 05 13:52:41 crc kubenswrapper[4711]: I1205 13:52:41.986309 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj_9aa1bdf3-cb54-4fbc-8ae3-56015757ca26/pull/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.013353 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj_9aa1bdf3-cb54-4fbc-8ae3-56015757ca26/util/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.019209 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj_9aa1bdf3-cb54-4fbc-8ae3-56015757ca26/pull/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.182541 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj_9aa1bdf3-cb54-4fbc-8ae3-56015757ca26/pull/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.186798 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj_9aa1bdf3-cb54-4fbc-8ae3-56015757ca26/extract/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.226532 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2aca67bceaf5f8f951e258252dcf1c8090dc81397508fd629a1381c43fhzswj_9aa1bdf3-cb54-4fbc-8ae3-56015757ca26/util/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.375470 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4vr8d_76fdb364-88f3-4033-8318-353b66329f32/kube-rbac-proxy/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.450310 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4vr8d_76fdb364-88f3-4033-8318-353b66329f32/manager/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.450359 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-mdk4g_f6b616b6-23f5-4671-8d91-cc11317f07a6/kube-rbac-proxy/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.617727 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-mdk4g_f6b616b6-23f5-4671-8d91-cc11317f07a6/manager/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.641667 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-jxmwb_bd9c2a02-45a9-444e-b026-18f6f632d157/manager/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.692629 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-jxmwb_bd9c2a02-45a9-444e-b026-18f6f632d157/kube-rbac-proxy/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.830888 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-z9pxv_81f3007f-2841-4b35-b36d-7527ad69da4f/kube-rbac-proxy/0.log"
Dec 05 13:52:42 crc kubenswrapper[4711]: I1205 13:52:42.892928 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-z9pxv_81f3007f-2841-4b35-b36d-7527ad69da4f/manager/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.018908 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-4nlqw_61d6cab9-9cd8-443b-ba0f-90de0670366b/kube-rbac-proxy/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.027055 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-4nlqw_61d6cab9-9cd8-443b-ba0f-90de0670366b/manager/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.076570 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-wl765_1647c9ea-6d82-4f30-9641-25f10d54fbc6/kube-rbac-proxy/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.227917 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-wl765_1647c9ea-6d82-4f30-9641-25f10d54fbc6/manager/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.292116 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-8b94l_162ed24d-7f1a-43d3-a543-84a19891bcd0/kube-rbac-proxy/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.438802 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-gpstg_0ee16c2f-bb8c-495c-a007-41444751c118/kube-rbac-proxy/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.482084 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-8b94l_162ed24d-7f1a-43d3-a543-84a19891bcd0/manager/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.550950 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-gpstg_0ee16c2f-bb8c-495c-a007-41444751c118/manager/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.652429 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-fs4fj_aef68c5a-5fdd-47dd-8205-8d39019e124e/kube-rbac-proxy/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.776955 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-fs4fj_aef68c5a-5fdd-47dd-8205-8d39019e124e/manager/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.841131 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-b2tj4_2fbc7dde-da0b-48ab-9af3-0c023a9f446b/manager/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.868295 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-b2tj4_2fbc7dde-da0b-48ab-9af3-0c023a9f446b/kube-rbac-proxy/0.log"
Dec 05 13:52:43 crc kubenswrapper[4711]: I1205 13:52:43.978115 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-glz4l_fd19ef58-a657-4733-bc5f-0917ea66ee3b/kube-rbac-proxy/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.073851 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-glz4l_fd19ef58-a657-4733-bc5f-0917ea66ee3b/manager/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.187809 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-87csw_c5aa315b-0c36-4fba-a6a0-69cc18d6f21f/kube-rbac-proxy/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.227466 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-87csw_c5aa315b-0c36-4fba-a6a0-69cc18d6f21f/manager/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.350618 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-fs7kh_7aee5f79-d3e2-4f7b-9047-d2ca4a048c00/kube-rbac-proxy/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.467880 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-fs7kh_7aee5f79-d3e2-4f7b-9047-d2ca4a048c00/manager/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.515033 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-mmwc9_c900de25-1ca3-4a0f-8485-c0e7d1b05f12/kube-rbac-proxy/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.592934 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-mmwc9_c900de25-1ca3-4a0f-8485-c0e7d1b05f12/manager/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.662046 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m_1b0cf425-4752-41b6-9a30-862e38015368/kube-rbac-proxy/0.log"
Dec 05 13:52:44 crc kubenswrapper[4711]: I1205 13:52:44.703316 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4h9v2m_1b0cf425-4752-41b6-9a30-862e38015368/manager/0.log"
Dec 05 13:52:45 crc kubenswrapper[4711]: I1205 13:52:45.130745 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-4ktx2_6de819ff-74b1-4645-a34c-434e3ac6eb60/registry-server/0.log"
Dec 05 13:52:45 crc kubenswrapper[4711]: I1205 13:52:45.158488 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-574d9f8c97-k2tqq_2adcea3c-2abd-421f-adf0-a0e30b5873fd/operator/0.log"
Dec 05 13:52:45 crc kubenswrapper[4711]: I1205 13:52:45.371016 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-vd4nf_d036d428-1c3b-4a5c-b071-04c3270d8e0d/kube-rbac-proxy/0.log"
Dec 05 13:52:45 crc kubenswrapper[4711]: I1205 13:52:45.536182 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-vd4nf_d036d428-1c3b-4a5c-b071-04c3270d8e0d/manager/0.log"
Dec 05 13:52:45 crc kubenswrapper[4711]: I1205 13:52:45.649850 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-sjdwh_50fdc7e4-c9d8-487f-827b-5e087aebdcb0/kube-rbac-proxy/0.log"
Dec 05 13:52:45 crc kubenswrapper[4711]: I1205 13:52:45.757684 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-sjdwh_50fdc7e4-c9d8-487f-827b-5e087aebdcb0/manager/0.log"
Dec 05 13:52:45 crc kubenswrapper[4711]: I1205 13:52:45.912152 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-tj9mg_e1047891-b091-4202-a4fc-38abe49ced8c/operator/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.002739 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-c989k_1651f201-08ba-4dea-88ff-680d91d475d3/kube-rbac-proxy/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.058772 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-c989k_1651f201-08ba-4dea-88ff-680d91d475d3/manager/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.168719 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-nzkbm_52651fb6-d860-4d58-9084-8d2c8dc49529/kube-rbac-proxy/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.322761 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-fcb8bd8db-wg5k4_09a6ffec-d739-4b72-8b6b-83609ed4f571/manager/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.389611 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-vvnrf_8bda4a64-08b2-4c48-b0c6-bfb094b7b985/kube-rbac-proxy/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.456596 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-vvnrf_8bda4a64-08b2-4c48-b0c6-bfb094b7b985/manager/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.472968 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-nzkbm_52651fb6-d860-4d58-9084-8d2c8dc49529/manager/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.644152 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6c9545865c-7z2lk_85bb9f84-9769-4960-83a3-00f1a8fb9851/manager/0.log"
Dec 05 13:52:46 crc kubenswrapper[4711]: I1205 13:52:46.645174 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6c9545865c-7z2lk_85bb9f84-9769-4960-83a3-00f1a8fb9851/kube-rbac-proxy/0.log"
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.301517 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.301894 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.301942 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt"
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.302846 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1244bad30887d8c80c8f4234338b30be7407f0295c05bb01ca7c173b5ecec36d"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.302911 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://1244bad30887d8c80c8f4234338b30be7407f0295c05bb01ca7c173b5ecec36d" gracePeriod=600
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.917076 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="1244bad30887d8c80c8f4234338b30be7407f0295c05bb01ca7c173b5ecec36d" exitCode=0
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.917122 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"1244bad30887d8c80c8f4234338b30be7407f0295c05bb01ca7c173b5ecec36d"}
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.917152 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"}
Dec 05 13:52:48 crc kubenswrapper[4711]: I1205 13:52:48.917185 4711 scope.go:117] "RemoveContainer" containerID="7dd496b432334fa7863bc4586fd7d2993cf438f3b5b02dc3f7e003c4e8087058"
Dec 05 13:53:07 crc kubenswrapper[4711]: I1205 13:53:07.102704 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-xvdpr_50f42111-4c58-408b-b2be-f739d494ef28/control-plane-machine-set-operator/0.log"
Dec 05 13:53:07 crc kubenswrapper[4711]: I1205 13:53:07.309326 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-plvn8_7c4e1e97-a3ae-414f-b12f-8b2463478934/kube-rbac-proxy/0.log"
Dec 05 13:53:07 crc kubenswrapper[4711]: I1205 13:53:07.336450 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-plvn8_7c4e1e97-a3ae-414f-b12f-8b2463478934/machine-api-operator/0.log"
Dec 05 13:53:19 crc kubenswrapper[4711]: I1205 13:53:19.010198 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-p68lq_d2ea3803-e172-4380-81eb-1027f1a667fa/cert-manager-controller/0.log"
Dec 05 13:53:19 crc kubenswrapper[4711]: I1205 13:53:19.135916 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-95xvt_34883f4c-a75b-4298-bd5b-ecf0077a79c6/cert-manager-cainjector/0.log"
Dec 05 13:53:19 crc kubenswrapper[4711]: I1205 13:53:19.191443 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-x5xls_38c17a5b-f6a0-4cbd-96dd-c95068e5e844/cert-manager-webhook/0.log"
Dec 05 13:53:30 crc kubenswrapper[4711]: I1205 13:53:30.726638 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-4cxfd_b56e7dfc-5e5e-48ac-949e-3b4704532748/nmstate-console-plugin/0.log"
Dec 05 13:53:30 crc kubenswrapper[4711]: I1205 13:53:30.858498 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-j7clc_4b791e02-e955-4407-a42e-98eedbff8135/nmstate-handler/0.log"
Dec 05 13:53:30 crc kubenswrapper[4711]: I1205 13:53:30.897651 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-pnfb8_c35b2a97-230a-483e-8321-fa0b7bd8c593/kube-rbac-proxy/0.log"
Dec 05 13:53:30 crc kubenswrapper[4711]: I1205 13:53:30.904426 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-pnfb8_c35b2a97-230a-483e-8321-fa0b7bd8c593/nmstate-metrics/0.log"
Dec 05 13:53:31 crc kubenswrapper[4711]: I1205 13:53:31.042659 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-mpk6s_27ec6a7b-d939-47c8-9fa4-0fa976c78cfc/nmstate-operator/0.log"
Dec 05 13:53:31 crc kubenswrapper[4711]: I1205 13:53:31.107405 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-tlcnm_58574466-bffd-4755-b6fb-09380f829468/nmstate-webhook/0.log"
Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.097106 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-nhn9s_30154788-eb0d-42a1-8f53-a1cb1b0cdb8f/kube-rbac-proxy/0.log"
Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.135740 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-nhn9s_30154788-eb0d-42a1-8f53-a1cb1b0cdb8f/controller/0.log"
Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.268783 4711 log.go:25]
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-frr-files/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.461348 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-reloader/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.472931 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-frr-files/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.473362 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-metrics/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.515958 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-reloader/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.746748 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-metrics/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.748324 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-frr-files/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.781722 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-metrics/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.782864 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-reloader/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.955410 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-reloader/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.970431 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-frr-files/0.log" Dec 05 13:53:45 crc kubenswrapper[4711]: I1205 13:53:45.981403 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/cp-metrics/0.log" Dec 05 13:53:46 crc kubenswrapper[4711]: I1205 13:53:46.005577 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/controller/0.log" Dec 05 13:53:46 crc kubenswrapper[4711]: I1205 13:53:46.162838 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/kube-rbac-proxy/0.log" Dec 05 13:53:46 crc kubenswrapper[4711]: I1205 13:53:46.230176 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/frr-metrics/0.log" Dec 05 13:53:46 crc kubenswrapper[4711]: I1205 13:53:46.259898 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/kube-rbac-proxy-frr/0.log" Dec 05 13:53:46 crc kubenswrapper[4711]: I1205 13:53:46.422515 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/reloader/0.log" Dec 05 13:53:46 crc 
kubenswrapper[4711]: I1205 13:53:46.562325 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-2wtps_a5d2949f-9c51-4955-8782-150e18eca73b/frr-k8s-webhook-server/0.log" Dec 05 13:53:46 crc kubenswrapper[4711]: I1205 13:53:46.791587 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7d9fb8bd49-lwf8k_ec342f47-c728-4826-8af2-576ec13046bf/manager/0.log" Dec 05 13:53:46 crc kubenswrapper[4711]: I1205 13:53:46.884489 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-545d776cd8-hwnls_8c3c5cf0-56f2-407a-99dc-c8abbc298527/webhook-server/0.log" Dec 05 13:53:47 crc kubenswrapper[4711]: I1205 13:53:47.062122 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-gfr4h_d5bb0d55-e7da-466b-be2d-59b2d74553fe/kube-rbac-proxy/0.log" Dec 05 13:53:47 crc kubenswrapper[4711]: I1205 13:53:47.636970 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-gfr4h_d5bb0d55-e7da-466b-be2d-59b2d74553fe/speaker/0.log" Dec 05 13:53:47 crc kubenswrapper[4711]: I1205 13:53:47.951779 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsfp8_b02b43b5-3133-49d0-bbbb-3d6aec73c79e/frr/0.log" Dec 05 13:54:01 crc kubenswrapper[4711]: I1205 13:54:01.410179 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l_d585a412-c9fb-45d1-a0ef-e26b9fc1ce84/util/0.log" Dec 05 13:54:01 crc kubenswrapper[4711]: I1205 13:54:01.667141 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l_d585a412-c9fb-45d1-a0ef-e26b9fc1ce84/util/0.log" Dec 05 13:54:01 crc kubenswrapper[4711]: I1205 13:54:01.684486 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l_d585a412-c9fb-45d1-a0ef-e26b9fc1ce84/pull/0.log" Dec 05 13:54:01 crc kubenswrapper[4711]: I1205 13:54:01.684912 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l_d585a412-c9fb-45d1-a0ef-e26b9fc1ce84/pull/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.135629 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l_d585a412-c9fb-45d1-a0ef-e26b9fc1ce84/extract/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.154877 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l_d585a412-c9fb-45d1-a0ef-e26b9fc1ce84/util/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.211996 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212frq67l_d585a412-c9fb-45d1-a0ef-e26b9fc1ce84/pull/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.371484 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz_18a13fe2-c30e-4b1f-8c32-dafc95e5000a/util/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.540415 4711 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz_18a13fe2-c30e-4b1f-8c32-dafc95e5000a/pull/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.580373 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz_18a13fe2-c30e-4b1f-8c32-dafc95e5000a/util/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.607631 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz_18a13fe2-c30e-4b1f-8c32-dafc95e5000a/pull/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.795941 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz_18a13fe2-c30e-4b1f-8c32-dafc95e5000a/extract/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.813736 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz_18a13fe2-c30e-4b1f-8c32-dafc95e5000a/util/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.832632 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zvpgz_18a13fe2-c30e-4b1f-8c32-dafc95e5000a/pull/0.log" Dec 05 13:54:02 crc kubenswrapper[4711]: I1205 13:54:02.976474 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr_0821eaca-5689-41bb-99e0-d717bcafc885/util/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.169175 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr_0821eaca-5689-41bb-99e0-d717bcafc885/util/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.193444 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr_0821eaca-5689-41bb-99e0-d717bcafc885/pull/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.221875 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr_0821eaca-5689-41bb-99e0-d717bcafc885/pull/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.437723 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr_0821eaca-5689-41bb-99e0-d717bcafc885/util/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.454032 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr_0821eaca-5689-41bb-99e0-d717bcafc885/pull/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.465283 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83w5jmr_0821eaca-5689-41bb-99e0-d717bcafc885/extract/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.701930 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5wtcc_ec151a45-6363-4aab-8672-d13b8be04c7d/extract-utilities/0.log" Dec 05 
13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.882631 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5wtcc_ec151a45-6363-4aab-8672-d13b8be04c7d/extract-content/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.898506 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5wtcc_ec151a45-6363-4aab-8672-d13b8be04c7d/extract-content/0.log" Dec 05 13:54:03 crc kubenswrapper[4711]: I1205 13:54:03.934219 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5wtcc_ec151a45-6363-4aab-8672-d13b8be04c7d/extract-utilities/0.log" Dec 05 13:54:04 crc kubenswrapper[4711]: I1205 13:54:04.176236 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5wtcc_ec151a45-6363-4aab-8672-d13b8be04c7d/extract-content/0.log" Dec 05 13:54:04 crc kubenswrapper[4711]: I1205 13:54:04.432969 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5wtcc_ec151a45-6363-4aab-8672-d13b8be04c7d/extract-utilities/0.log" Dec 05 13:54:04 crc kubenswrapper[4711]: I1205 13:54:04.514114 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6r649_9a670e5d-c2a5-4d57-84a0-727c564f7325/extract-utilities/0.log" Dec 05 13:54:04 crc kubenswrapper[4711]: I1205 13:54:04.704944 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6r649_9a670e5d-c2a5-4d57-84a0-727c564f7325/extract-content/0.log" Dec 05 13:54:04 crc kubenswrapper[4711]: I1205 13:54:04.715277 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6r649_9a670e5d-c2a5-4d57-84a0-727c564f7325/extract-utilities/0.log" Dec 05 13:54:04 crc kubenswrapper[4711]: I1205 13:54:04.844436 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6r649_9a670e5d-c2a5-4d57-84a0-727c564f7325/extract-content/0.log" Dec 05 13:54:04 crc kubenswrapper[4711]: I1205 13:54:04.990740 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6r649_9a670e5d-c2a5-4d57-84a0-727c564f7325/extract-utilities/0.log" Dec 05 13:54:05 crc kubenswrapper[4711]: I1205 13:54:05.018497 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6r649_9a670e5d-c2a5-4d57-84a0-727c564f7325/extract-content/0.log" Dec 05 13:54:05 crc kubenswrapper[4711]: I1205 13:54:05.242619 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5wtcc_ec151a45-6363-4aab-8672-d13b8be04c7d/registry-server/0.log" Dec 05 13:54:05 crc kubenswrapper[4711]: I1205 13:54:05.263032 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-2hspf_cf774310-240f-4e72-9154-1321129d54dd/marketplace-operator/0.log" Dec 05 13:54:05 crc kubenswrapper[4711]: I1205 13:54:05.495577 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ks7xz_92da7542-202f-4d2e-b3a1-f300483f4523/extract-utilities/0.log" Dec 05 13:54:05 crc kubenswrapper[4711]: I1205 13:54:05.798949 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ks7xz_92da7542-202f-4d2e-b3a1-f300483f4523/extract-content/0.log" Dec 05 
13:54:05 crc kubenswrapper[4711]: I1205 13:54:05.807767 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ks7xz_92da7542-202f-4d2e-b3a1-f300483f4523/extract-utilities/0.log" Dec 05 13:54:05 crc kubenswrapper[4711]: I1205 13:54:05.850037 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ks7xz_92da7542-202f-4d2e-b3a1-f300483f4523/extract-content/0.log" Dec 05 13:54:05 crc kubenswrapper[4711]: I1205 13:54:05.970227 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6r649_9a670e5d-c2a5-4d57-84a0-727c564f7325/registry-server/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.035555 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ks7xz_92da7542-202f-4d2e-b3a1-f300483f4523/extract-content/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.075978 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ks7xz_92da7542-202f-4d2e-b3a1-f300483f4523/extract-utilities/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.170983 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72h44_b2a49cc0-5d95-4923-a909-0bf5988aed8d/extract-utilities/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.274349 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ks7xz_92da7542-202f-4d2e-b3a1-f300483f4523/registry-server/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.381558 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72h44_b2a49cc0-5d95-4923-a909-0bf5988aed8d/extract-utilities/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.403452 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72h44_b2a49cc0-5d95-4923-a909-0bf5988aed8d/extract-content/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.445544 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72h44_b2a49cc0-5d95-4923-a909-0bf5988aed8d/extract-content/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.604829 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72h44_b2a49cc0-5d95-4923-a909-0bf5988aed8d/extract-utilities/0.log" Dec 05 13:54:06 crc kubenswrapper[4711]: I1205 13:54:06.629200 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72h44_b2a49cc0-5d95-4923-a909-0bf5988aed8d/extract-content/0.log" Dec 05 13:54:07 crc kubenswrapper[4711]: I1205 13:54:07.427576 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-72h44_b2a49cc0-5d95-4923-a909-0bf5988aed8d/registry-server/0.log" Dec 05 13:54:19 crc kubenswrapper[4711]: I1205 13:54:19.364807 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-g96b2_8d47e5b8-d669-4947-b439-eaa06a31187e/prometheus-operator/0.log" Dec 05 13:54:19 crc kubenswrapper[4711]: I1205 13:54:19.533841 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-c959d4656-ng96c_f7e16934-1f3b-4272-ac69-4bcc6eec482f/prometheus-operator-admission-webhook/0.log" Dec 05 13:54:19 crc kubenswrapper[4711]: I1205 13:54:19.783276 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-c959d4656-nhj4j_3edd2ed9-7a50-40f6-8a48-9398732a79cb/prometheus-operator-admission-webhook/0.log" Dec 05 13:54:19 crc kubenswrapper[4711]: I1205 13:54:19.899957 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-6lzpv_d56ccf44-6d74-47a4-93f2-d242ad353756/operator/0.log" Dec 05 13:54:19 crc kubenswrapper[4711]: I1205 13:54:19.983177 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-brnw9_0b910cd0-6b78-4758-a2b9-2a451b85ca3d/perses-operator/0.log" Dec 05 13:54:48 crc kubenswrapper[4711]: I1205 13:54:48.301046 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:54:48 crc kubenswrapper[4711]: I1205 13:54:48.301625 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.672163 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s7hkr"] Dec 05 13:55:04 crc kubenswrapper[4711]: E1205 13:55:04.673437 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c506990-a89e-4abd-923e-e762a209a808" containerName="container-00" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.673455 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c506990-a89e-4abd-923e-e762a209a808" containerName="container-00" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.673697 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c506990-a89e-4abd-923e-e762a209a808" containerName="container-00" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.675866 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.711966 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s7hkr"] Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.811030 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-utilities\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.811121 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9f272\" (UniqueName: \"kubernetes.io/projected/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-kube-api-access-9f272\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.811222 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-catalog-content\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.913070 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9f272\" (UniqueName: \"kubernetes.io/projected/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-kube-api-access-9f272\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.913175 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-catalog-content\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.913257 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-utilities\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.913902 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-utilities\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.913966 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-catalog-content\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.947284 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9f272\" (UniqueName: \"kubernetes.io/projected/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-kube-api-access-9f272\") pod \"community-operators-s7hkr\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:04 crc kubenswrapper[4711]: I1205 13:55:04.998579 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:05 crc kubenswrapper[4711]: I1205 13:55:05.625973 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s7hkr"] Dec 05 13:55:05 crc kubenswrapper[4711]: W1205 13:55:05.631410 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b914bf5_662a_4c7e_8e0c_01db6bb69e82.slice/crio-483457be5acb68cb8f35bef47d68a6eec7884440fc3421a86992d6c67ec230a0 WatchSource:0}: Error finding container 483457be5acb68cb8f35bef47d68a6eec7884440fc3421a86992d6c67ec230a0: Status 404 returned error can't find the container with id 483457be5acb68cb8f35bef47d68a6eec7884440fc3421a86992d6c67ec230a0 Dec 05 13:55:06 crc kubenswrapper[4711]: I1205 13:55:06.260550 4711 generic.go:334] "Generic (PLEG): container finished" podID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerID="67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7" exitCode=0 Dec 05 13:55:06 crc kubenswrapper[4711]: I1205 13:55:06.260848 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7hkr" event={"ID":"1b914bf5-662a-4c7e-8e0c-01db6bb69e82","Type":"ContainerDied","Data":"67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7"} Dec 05 13:55:06 crc kubenswrapper[4711]: I1205 13:55:06.260878 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7hkr" event={"ID":"1b914bf5-662a-4c7e-8e0c-01db6bb69e82","Type":"ContainerStarted","Data":"483457be5acb68cb8f35bef47d68a6eec7884440fc3421a86992d6c67ec230a0"} Dec 05 13:55:06 crc kubenswrapper[4711]: I1205 13:55:06.262722 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:55:08 crc kubenswrapper[4711]: I1205 13:55:08.282058 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7hkr" event={"ID":"1b914bf5-662a-4c7e-8e0c-01db6bb69e82","Type":"ContainerStarted","Data":"c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5"} Dec 05 13:55:09 crc kubenswrapper[4711]: I1205 13:55:09.291267 4711 generic.go:334] "Generic (PLEG): container finished" podID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerID="c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5" exitCode=0 Dec 05 13:55:09 crc kubenswrapper[4711]: I1205 13:55:09.291445 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7hkr" event={"ID":"1b914bf5-662a-4c7e-8e0c-01db6bb69e82","Type":"ContainerDied","Data":"c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5"} Dec 05 13:55:10 crc kubenswrapper[4711]: I1205 13:55:10.320595 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7hkr" event={"ID":"1b914bf5-662a-4c7e-8e0c-01db6bb69e82","Type":"ContainerStarted","Data":"b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205"} Dec 05 13:55:10 crc kubenswrapper[4711]: I1205 
13:55:10.351505 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s7hkr" podStartSLOduration=2.55939398 podStartE2EDuration="6.351481111s" podCreationTimestamp="2025-12-05 13:55:04 +0000 UTC" firstStartedPulling="2025-12-05 13:55:06.262453342 +0000 UTC m=+6351.846775672" lastFinishedPulling="2025-12-05 13:55:10.054540453 +0000 UTC m=+6355.638862803" observedRunningTime="2025-12-05 13:55:10.345328261 +0000 UTC m=+6355.929650591" watchObservedRunningTime="2025-12-05 13:55:10.351481111 +0000 UTC m=+6355.935803451" Dec 05 13:55:15 crc kubenswrapper[4711]: I1205 13:55:14.999156 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:15 crc kubenswrapper[4711]: I1205 13:55:14.999547 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:15 crc kubenswrapper[4711]: I1205 13:55:15.058928 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:15 crc kubenswrapper[4711]: I1205 13:55:15.412447 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:15 crc kubenswrapper[4711]: I1205 13:55:15.472252 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s7hkr"] Dec 05 13:55:17 crc kubenswrapper[4711]: I1205 13:55:17.381616 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s7hkr" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerName="registry-server" containerID="cri-o://b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205" gracePeriod=2 Dec 05 13:55:17 crc kubenswrapper[4711]: I1205 13:55:17.889859 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.003145 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-utilities\") pod \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.003233 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9f272\" (UniqueName: \"kubernetes.io/projected/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-kube-api-access-9f272\") pod \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.003329 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-catalog-content\") pod \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\" (UID: \"1b914bf5-662a-4c7e-8e0c-01db6bb69e82\") " Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.004190 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-utilities" (OuterVolumeSpecName: "utilities") pod "1b914bf5-662a-4c7e-8e0c-01db6bb69e82" (UID: "1b914bf5-662a-4c7e-8e0c-01db6bb69e82"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.005078 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.011432 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-kube-api-access-9f272" (OuterVolumeSpecName: "kube-api-access-9f272") pod "1b914bf5-662a-4c7e-8e0c-01db6bb69e82" (UID: "1b914bf5-662a-4c7e-8e0c-01db6bb69e82"). InnerVolumeSpecName "kube-api-access-9f272". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.055054 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b914bf5-662a-4c7e-8e0c-01db6bb69e82" (UID: "1b914bf5-662a-4c7e-8e0c-01db6bb69e82"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.107333 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9f272\" (UniqueName: \"kubernetes.io/projected/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-kube-api-access-9f272\") on node \"crc\" DevicePath \"\"" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.107406 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b914bf5-662a-4c7e-8e0c-01db6bb69e82-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.300964 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.301018 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.392614 4711 generic.go:334] "Generic (PLEG): container finished" podID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerID="b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205" exitCode=0 Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.392663 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7hkr" event={"ID":"1b914bf5-662a-4c7e-8e0c-01db6bb69e82","Type":"ContainerDied","Data":"b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205"} Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.392695 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7hkr" event={"ID":"1b914bf5-662a-4c7e-8e0c-01db6bb69e82","Type":"ContainerDied","Data":"483457be5acb68cb8f35bef47d68a6eec7884440fc3421a86992d6c67ec230a0"} Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.392714 4711 scope.go:117] "RemoveContainer" 
containerID="b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.392878 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s7hkr" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.414117 4711 scope.go:117] "RemoveContainer" containerID="c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.437565 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s7hkr"] Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.450313 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s7hkr"] Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.456054 4711 scope.go:117] "RemoveContainer" containerID="67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.480510 4711 scope.go:117] "RemoveContainer" containerID="b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205" Dec 05 13:55:18 crc kubenswrapper[4711]: E1205 13:55:18.481049 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205\": container with ID starting with b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205 not found: ID does not exist" containerID="b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.481092 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205"} err="failed to get container status \"b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205\": rpc error: code = NotFound desc = could not find container \"b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205\": container with ID starting with b32898575fd75786fa5fbd30875374de25468b965764f52f4e2e19700fb4c205 not found: ID does not exist" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.481123 4711 scope.go:117] "RemoveContainer" containerID="c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5" Dec 05 13:55:18 crc kubenswrapper[4711]: E1205 13:55:18.481872 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5\": container with ID starting with c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5 not found: ID does not exist" containerID="c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.481911 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5"} err="failed to get container status \"c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5\": rpc error: code = NotFound desc = could not find container \"c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5\": container with ID starting with c5731111bf2ae7e684d280c32cf9a14335dc98a6c28b5d2e7f9a090ee78f9ac5 not found: ID does not exist" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.481927 4711 scope.go:117] 
"RemoveContainer" containerID="67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7" Dec 05 13:55:18 crc kubenswrapper[4711]: E1205 13:55:18.482989 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7\": container with ID starting with 67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7 not found: ID does not exist" containerID="67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.483019 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7"} err="failed to get container status \"67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7\": rpc error: code = NotFound desc = could not find container \"67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7\": container with ID starting with 67458067ae11033c7ca4c9d0b0fd3b76d7de36d6d78a199fd4d64558358ad2a7 not found: ID does not exist" Dec 05 13:55:18 crc kubenswrapper[4711]: I1205 13:55:18.703527 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" path="/var/lib/kubelet/pods/1b914bf5-662a-4c7e-8e0c-01db6bb69e82/volumes" Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.301067 4711 patch_prober.go:28] interesting pod/machine-config-daemon-drklt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.301603 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.301656 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-drklt" Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.302449 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"} pod="openshift-machine-config-operator/machine-config-daemon-drklt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.302499 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerName="machine-config-daemon" containerID="cri-o://c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12" gracePeriod=600 Dec 05 13:55:48 crc kubenswrapper[4711]: E1205 13:55:48.429915 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.744271 4711 generic.go:334] "Generic (PLEG): container finished" podID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12" exitCode=0 Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.744680 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerDied","Data":"c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"} Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.744734 4711 scope.go:117] "RemoveContainer" containerID="1244bad30887d8c80c8f4234338b30be7407f0295c05bb01ca7c173b5ecec36d" Dec 05 13:55:48 crc kubenswrapper[4711]: I1205 13:55:48.746270 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12" Dec 05 13:55:48 crc kubenswrapper[4711]: E1205 13:55:48.746771 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:55:59 crc kubenswrapper[4711]: I1205 13:55:59.683738 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12" Dec 05 13:55:59 crc kubenswrapper[4711]: E1205 13:55:59.684621 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:56:11 crc kubenswrapper[4711]: I1205 13:56:11.684237 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12" Dec 05 13:56:11 crc kubenswrapper[4711]: E1205 13:56:11.685046 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0" Dec 05 13:56:22 crc kubenswrapper[4711]: I1205 13:56:22.683145 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12" Dec 05 13:56:22 crc kubenswrapper[4711]: E1205 13:56:22.683943 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:56:25 crc kubenswrapper[4711]: I1205 13:56:25.182741 4711 generic.go:334] "Generic (PLEG): container finished" podID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerID="d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9" exitCode=0
Dec 05 13:56:25 crc kubenswrapper[4711]: I1205 13:56:25.182883 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4m758/must-gather-znh4x" event={"ID":"14620ce2-6538-49f6-8477-ed3b2c5076bb","Type":"ContainerDied","Data":"d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9"}
Dec 05 13:56:25 crc kubenswrapper[4711]: I1205 13:56:25.184005 4711 scope.go:117] "RemoveContainer" containerID="d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9"
Dec 05 13:56:25 crc kubenswrapper[4711]: I1205 13:56:25.423789 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4m758_must-gather-znh4x_14620ce2-6538-49f6-8477-ed3b2c5076bb/gather/0.log"
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.245007 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4m758/must-gather-znh4x"]
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.245881 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-4m758/must-gather-znh4x" podUID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerName="copy" containerID="cri-o://a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd" gracePeriod=2
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.257865 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4m758/must-gather-znh4x"]
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.683267 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4m758_must-gather-znh4x_14620ce2-6538-49f6-8477-ed3b2c5076bb/copy/0.log"
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.683774 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/must-gather-znh4x"
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.858298 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znkft\" (UniqueName: \"kubernetes.io/projected/14620ce2-6538-49f6-8477-ed3b2c5076bb-kube-api-access-znkft\") pod \"14620ce2-6538-49f6-8477-ed3b2c5076bb\" (UID: \"14620ce2-6538-49f6-8477-ed3b2c5076bb\") "
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.858421 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/14620ce2-6538-49f6-8477-ed3b2c5076bb-must-gather-output\") pod \"14620ce2-6538-49f6-8477-ed3b2c5076bb\" (UID: \"14620ce2-6538-49f6-8477-ed3b2c5076bb\") "
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.864773 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14620ce2-6538-49f6-8477-ed3b2c5076bb-kube-api-access-znkft" (OuterVolumeSpecName: "kube-api-access-znkft") pod "14620ce2-6538-49f6-8477-ed3b2c5076bb" (UID: "14620ce2-6538-49f6-8477-ed3b2c5076bb"). InnerVolumeSpecName "kube-api-access-znkft". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:56:34 crc kubenswrapper[4711]: I1205 13:56:34.961310 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znkft\" (UniqueName: \"kubernetes.io/projected/14620ce2-6538-49f6-8477-ed3b2c5076bb-kube-api-access-znkft\") on node \"crc\" DevicePath \"\""
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.046240 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14620ce2-6538-49f6-8477-ed3b2c5076bb-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "14620ce2-6538-49f6-8477-ed3b2c5076bb" (UID: "14620ce2-6538-49f6-8477-ed3b2c5076bb"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.063817 4711 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/14620ce2-6538-49f6-8477-ed3b2c5076bb-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.289828 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4m758_must-gather-znh4x_14620ce2-6538-49f6-8477-ed3b2c5076bb/copy/0.log"
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.290231 4711 generic.go:334] "Generic (PLEG): container finished" podID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerID="a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd" exitCode=143
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.290288 4711 scope.go:117] "RemoveContainer" containerID="a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd"
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.290311 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4m758/must-gather-znh4x"
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.322473 4711 scope.go:117] "RemoveContainer" containerID="d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9"
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.401094 4711 scope.go:117] "RemoveContainer" containerID="a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd"
Dec 05 13:56:35 crc kubenswrapper[4711]: E1205 13:56:35.401839 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd\": container with ID starting with a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd not found: ID does not exist" containerID="a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd"
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.401877 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd"} err="failed to get container status \"a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd\": rpc error: code = NotFound desc = could not find container \"a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd\": container with ID starting with a06410ba04a2d679c423da2998cea283ddd841e0e20fec9f61cb7a66cf0851dd not found: ID does not exist"
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.401911 4711 scope.go:117] "RemoveContainer" containerID="d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9"
Dec 05 13:56:35 crc kubenswrapper[4711]: E1205 13:56:35.402441 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9\": container with ID starting with d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9 not found: ID does not exist" containerID="d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9"
Dec 05 13:56:35 crc kubenswrapper[4711]: I1205 13:56:35.402474 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9"} err="failed to get container status \"d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9\": rpc error: code = NotFound desc = could not find container \"d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9\": container with ID starting with d82f4286b8c152b71edddebf370dc171de4b027650644d1f2cc86a64245527d9 not found: ID does not exist"
Dec 05 13:56:36 crc kubenswrapper[4711]: I1205 13:56:36.696934 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14620ce2-6538-49f6-8477-ed3b2c5076bb" path="/var/lib/kubelet/pods/14620ce2-6538-49f6-8477-ed3b2c5076bb/volumes"
Dec 05 13:56:37 crc kubenswrapper[4711]: I1205 13:56:37.683837 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:56:37 crc kubenswrapper[4711]: E1205 13:56:37.684570 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:56:52 crc kubenswrapper[4711]: I1205 13:56:52.683867 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:56:52 crc kubenswrapper[4711]: E1205 13:56:52.686668 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:57:03 crc kubenswrapper[4711]: I1205 13:57:03.683712 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:57:03 crc kubenswrapper[4711]: E1205 13:57:03.684562 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:57:17 crc kubenswrapper[4711]: I1205 13:57:17.682770 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:57:17 crc kubenswrapper[4711]: E1205 13:57:17.684602 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:57:29 crc kubenswrapper[4711]: I1205 13:57:29.683663 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:57:29 crc kubenswrapper[4711]: E1205 13:57:29.684620 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:57:32 crc kubenswrapper[4711]: I1205 13:57:32.726197 4711 scope.go:117] "RemoveContainer" containerID="a7a7940643467f280bb1e07dced9cce230ca364d387122d5c55a73eca2f4a1a3"
Dec 05 13:57:41 crc kubenswrapper[4711]: I1205 13:57:41.683408 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:57:41 crc kubenswrapper[4711]: E1205 13:57:41.684094 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:57:53 crc kubenswrapper[4711]: I1205 13:57:53.684745 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:57:53 crc kubenswrapper[4711]: E1205 13:57:53.685527 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:58:08 crc kubenswrapper[4711]: I1205 13:58:08.692074 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:58:08 crc kubenswrapper[4711]: E1205 13:58:08.693261 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:58:21 crc kubenswrapper[4711]: I1205 13:58:21.684598 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:58:21 crc kubenswrapper[4711]: E1205 13:58:21.685666 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:58:32 crc kubenswrapper[4711]: I1205 13:58:32.683107 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:58:32 crc kubenswrapper[4711]: E1205 13:58:32.683757 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:58:44 crc kubenswrapper[4711]: I1205 13:58:44.684516 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:58:44 crc kubenswrapper[4711]: E1205 13:58:44.685382 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:58:55 crc kubenswrapper[4711]: I1205 13:58:55.683764 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:58:55 crc kubenswrapper[4711]: E1205 13:58:55.687018 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:59:10 crc kubenswrapper[4711]: I1205 13:59:10.684442 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:59:10 crc kubenswrapper[4711]: E1205 13:59:10.685262 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:59:21 crc kubenswrapper[4711]: I1205 13:59:21.684489 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:59:21 crc kubenswrapper[4711]: E1205 13:59:21.685296 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:59:32 crc kubenswrapper[4711]: I1205 13:59:32.682891 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:59:32 crc kubenswrapper[4711]: E1205 13:59:32.683636 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:59:43 crc kubenswrapper[4711]: I1205 13:59:43.683186 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:59:43 crc kubenswrapper[4711]: E1205 13:59:43.683997 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 13:59:55 crc kubenswrapper[4711]: I1205 13:59:55.683379 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 13:59:55 crc kubenswrapper[4711]: E1205 13:59:55.685961 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.166195 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"]
Dec 05 14:00:00 crc kubenswrapper[4711]: E1205 14:00:00.167716 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerName="extract-utilities"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.167731 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerName="extract-utilities"
Dec 05 14:00:00 crc kubenswrapper[4711]: E1205 14:00:00.167750 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerName="gather"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.167756 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerName="gather"
Dec 05 14:00:00 crc kubenswrapper[4711]: E1205 14:00:00.167769 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerName="registry-server"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.167775 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerName="registry-server"
Dec 05 14:00:00 crc kubenswrapper[4711]: E1205 14:00:00.167792 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerName="copy"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.167797 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerName="copy"
Dec 05 14:00:00 crc kubenswrapper[4711]: E1205 14:00:00.167828 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerName="extract-content"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.167834 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerName="extract-content"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.168006 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b914bf5-662a-4c7e-8e0c-01db6bb69e82" containerName="registry-server"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.168020 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerName="copy"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.168032 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="14620ce2-6538-49f6-8477-ed3b2c5076bb" containerName="gather"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.168728 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.172953 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.174597 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.190045 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"]
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.269106 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r89j\" (UniqueName: \"kubernetes.io/projected/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-kube-api-access-9r89j\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.269181 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-config-volume\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.269243 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-secret-volume\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.370993 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-secret-volume\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.371172 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r89j\" (UniqueName: \"kubernetes.io/projected/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-kube-api-access-9r89j\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.371218 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-config-volume\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.372213 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-config-volume\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.395500 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-secret-volume\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.402041 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r89j\" (UniqueName: \"kubernetes.io/projected/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-kube-api-access-9r89j\") pod \"collect-profiles-29415720-xjwlq\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.510381 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:00 crc kubenswrapper[4711]: I1205 14:00:00.998016 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"]
Dec 05 14:00:01 crc kubenswrapper[4711]: I1205 14:00:01.412015 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq" event={"ID":"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333","Type":"ContainerStarted","Data":"27776963d5b292f5f85002a764be67105d8ad6b74bd2e3890603ffc6c694a514"}
Dec 05 14:00:01 crc kubenswrapper[4711]: I1205 14:00:01.412068 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq" event={"ID":"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333","Type":"ContainerStarted","Data":"865fb2b29ef9293a8145417369df2dd9b026a64dbd7834eadca5be68dcf4949a"}
Dec 05 14:00:01 crc kubenswrapper[4711]: I1205 14:00:01.433276 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq" podStartSLOduration=1.433254466 podStartE2EDuration="1.433254466s" podCreationTimestamp="2025-12-05 14:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 14:00:01.426684054 +0000 UTC m=+6647.011006414" watchObservedRunningTime="2025-12-05 14:00:01.433254466 +0000 UTC m=+6647.017576796"
Dec 05 14:00:02 crc kubenswrapper[4711]: I1205 14:00:02.423638 4711 generic.go:334] "Generic (PLEG): container finished" podID="8c8b7eeb-edb0-42c7-81f1-21d2b64cb333" containerID="27776963d5b292f5f85002a764be67105d8ad6b74bd2e3890603ffc6c694a514" exitCode=0
Dec 05 14:00:02 crc kubenswrapper[4711]: I1205 14:00:02.423738 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq" event={"ID":"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333","Type":"ContainerDied","Data":"27776963d5b292f5f85002a764be67105d8ad6b74bd2e3890603ffc6c694a514"}
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.828182 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.843464 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r89j\" (UniqueName: \"kubernetes.io/projected/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-kube-api-access-9r89j\") pod \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") "
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.843838 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-secret-volume\") pod \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") "
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.843928 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-config-volume\") pod \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\" (UID: \"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333\") "
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.844858 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-config-volume" (OuterVolumeSpecName: "config-volume") pod "8c8b7eeb-edb0-42c7-81f1-21d2b64cb333" (UID: "8c8b7eeb-edb0-42c7-81f1-21d2b64cb333"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.851583 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8c8b7eeb-edb0-42c7-81f1-21d2b64cb333" (UID: "8c8b7eeb-edb0-42c7-81f1-21d2b64cb333"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.852543 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-kube-api-access-9r89j" (OuterVolumeSpecName: "kube-api-access-9r89j") pod "8c8b7eeb-edb0-42c7-81f1-21d2b64cb333" (UID: "8c8b7eeb-edb0-42c7-81f1-21d2b64cb333"). InnerVolumeSpecName "kube-api-access-9r89j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.946482 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r89j\" (UniqueName: \"kubernetes.io/projected/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-kube-api-access-9r89j\") on node \"crc\" DevicePath \"\""
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.946519 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 14:00:03 crc kubenswrapper[4711]: I1205 14:00:03.946529 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c8b7eeb-edb0-42c7-81f1-21d2b64cb333-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 14:00:04 crc kubenswrapper[4711]: I1205 14:00:04.442503 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq" event={"ID":"8c8b7eeb-edb0-42c7-81f1-21d2b64cb333","Type":"ContainerDied","Data":"865fb2b29ef9293a8145417369df2dd9b026a64dbd7834eadca5be68dcf4949a"}
Dec 05 14:00:04 crc kubenswrapper[4711]: I1205 14:00:04.442540 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="865fb2b29ef9293a8145417369df2dd9b026a64dbd7834eadca5be68dcf4949a"
Dec 05 14:00:04 crc kubenswrapper[4711]: I1205 14:00:04.442586 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-xjwlq"
Dec 05 14:00:04 crc kubenswrapper[4711]: I1205 14:00:04.510331 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8"]
Dec 05 14:00:04 crc kubenswrapper[4711]: I1205 14:00:04.519250 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-mnpr8"]
Dec 05 14:00:04 crc kubenswrapper[4711]: I1205 14:00:04.693497 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12e2758f-eb96-403e-bbf8-6c7bbc3bc034" path="/var/lib/kubelet/pods/12e2758f-eb96-403e-bbf8-6c7bbc3bc034/volumes"
Dec 05 14:00:06 crc kubenswrapper[4711]: I1205 14:00:06.683196 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 14:00:06 crc kubenswrapper[4711]: E1205 14:00:06.683716 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 14:00:20 crc kubenswrapper[4711]: I1205 14:00:20.684172 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 14:00:20 crc kubenswrapper[4711]: E1205 14:00:20.684873 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 14:00:32 crc kubenswrapper[4711]: I1205 14:00:32.861573 4711 scope.go:117] "RemoveContainer" containerID="2f36490362488c78986ad15bd4a09d20672bc23dbdaa3099b1ce39d317894436"
Dec 05 14:00:35 crc kubenswrapper[4711]: I1205 14:00:35.684312 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 14:00:35 crc kubenswrapper[4711]: E1205 14:00:35.685514 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-drklt_openshift-machine-config-operator(47bc5fb6-f724-409c-9a04-3c5e50951dd0)\"" pod="openshift-machine-config-operator/machine-config-daemon-drklt" podUID="47bc5fb6-f724-409c-9a04-3c5e50951dd0"
Dec 05 14:00:48 crc kubenswrapper[4711]: I1205 14:00:48.692489 4711 scope.go:117] "RemoveContainer" containerID="c01aec0de5970d5ce4b7514e3edc3c6e83d85d26c82ef31c197d49fd4b028e12"
Dec 05 14:00:49 crc kubenswrapper[4711]: I1205 14:00:49.907953 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-drklt" event={"ID":"47bc5fb6-f724-409c-9a04-3c5e50951dd0","Type":"ContainerStarted","Data":"eff33ca60f8b21628a5daacb9649ecb09a0d966aee7fba5e731dc52a4c79c2dc"}
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.158533 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415721-rxpfc"]
Dec 05 14:01:00 crc kubenswrapper[4711]: E1205 14:01:00.159843 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c8b7eeb-edb0-42c7-81f1-21d2b64cb333" containerName="collect-profiles"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.159863 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c8b7eeb-edb0-42c7-81f1-21d2b64cb333" containerName="collect-profiles"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.160234 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c8b7eeb-edb0-42c7-81f1-21d2b64cb333" containerName="collect-profiles"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.161500 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.197464 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415721-rxpfc"]
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.252124 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-fernet-keys\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.252479 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42nj2\" (UniqueName: \"kubernetes.io/projected/18636364-62fb-46ea-bd33-733ec99b6529-kube-api-access-42nj2\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.252637 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-combined-ca-bundle\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.252761 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-config-data\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.355057 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-combined-ca-bundle\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.355110 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-config-data\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.355206 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-fernet-keys\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.355257 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42nj2\" (UniqueName: \"kubernetes.io/projected/18636364-62fb-46ea-bd33-733ec99b6529-kube-api-access-42nj2\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.363174 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-fernet-keys\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.364174 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-combined-ca-bundle\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.365095 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-config-data\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.376840 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42nj2\" (UniqueName: \"kubernetes.io/projected/18636364-62fb-46ea-bd33-733ec99b6529-kube-api-access-42nj2\") pod \"keystone-cron-29415721-rxpfc\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") " pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.494759 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:00 crc kubenswrapper[4711]: I1205 14:01:00.973217 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415721-rxpfc"]
Dec 05 14:01:01 crc kubenswrapper[4711]: I1205 14:01:01.016094 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415721-rxpfc" event={"ID":"18636364-62fb-46ea-bd33-733ec99b6529","Type":"ContainerStarted","Data":"43d88fd13871bb2f7d507f2c932d41edad6764ac5d4c80209a06726aee3ec989"}
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.029313 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415721-rxpfc" event={"ID":"18636364-62fb-46ea-bd33-733ec99b6529","Type":"ContainerStarted","Data":"abfa0482f89f8c9e17248e22b0029fcd1a5febf4001d85ec189c9b98e74333ea"}
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.049139 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415721-rxpfc" podStartSLOduration=2.049118356 podStartE2EDuration="2.049118356s" podCreationTimestamp="2025-12-05 14:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 14:01:02.042959305 +0000 UTC m=+6707.627281675" watchObservedRunningTime="2025-12-05 14:01:02.049118356 +0000 UTC m=+6707.633440686"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.530998 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kpn6p"]
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.533557 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.560835 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpn6p"]
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.603748 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-catalog-content\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.603839 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-utilities\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.603919 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xxxf\" (UniqueName: \"kubernetes.io/projected/ba95f659-4fa7-4757-bb85-5502b5a338cf-kube-api-access-2xxxf\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.705470 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-catalog-content\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.705568 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-utilities\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.705640 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xxxf\" (UniqueName: \"kubernetes.io/projected/ba95f659-4fa7-4757-bb85-5502b5a338cf-kube-api-access-2xxxf\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.706068 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-utilities\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.706344 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-catalog-content\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.733545 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xxxf\" (UniqueName: \"kubernetes.io/projected/ba95f659-4fa7-4757-bb85-5502b5a338cf-kube-api-access-2xxxf\") pod \"redhat-marketplace-kpn6p\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.744938 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j2x2p"]
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.781809 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.812644 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-catalog-content\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.812954 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-utilities\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.813366 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7h4l\" (UniqueName: \"kubernetes.io/projected/de36f048-c19b-4002-a1c5-e9058f997558-kube-api-access-j7h4l\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.817471 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j2x2p"]
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.864761 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kpn6p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.915885 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-catalog-content\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.916020 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-utilities\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.916122 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7h4l\" (UniqueName: \"kubernetes.io/projected/de36f048-c19b-4002-a1c5-e9058f997558-kube-api-access-j7h4l\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.919232 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-catalog-content\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.919548 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-utilities\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:02 crc kubenswrapper[4711]: I1205 14:01:02.939936 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7h4l\" (UniqueName: \"kubernetes.io/projected/de36f048-c19b-4002-a1c5-e9058f997558-kube-api-access-j7h4l\") pod \"redhat-operators-j2x2p\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:03 crc kubenswrapper[4711]: I1205 14:01:03.118903 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j2x2p"
Dec 05 14:01:03 crc kubenswrapper[4711]: W1205 14:01:03.473304 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba95f659_4fa7_4757_bb85_5502b5a338cf.slice/crio-d70ff7056b4e72824ca14030bcc15c87d735762c5987f31b8da143feded55a3e WatchSource:0}: Error finding container d70ff7056b4e72824ca14030bcc15c87d735762c5987f31b8da143feded55a3e: Status 404 returned error can't find the container with id d70ff7056b4e72824ca14030bcc15c87d735762c5987f31b8da143feded55a3e
Dec 05 14:01:03 crc kubenswrapper[4711]: I1205 14:01:03.478541 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpn6p"]
Dec 05 14:01:03 crc kubenswrapper[4711]: I1205 14:01:03.655612 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j2x2p"]
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.058189 4711 generic.go:334] "Generic (PLEG): container finished" podID="ba95f659-4fa7-4757-bb85-5502b5a338cf" containerID="fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910" exitCode=0
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.058258 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpn6p" event={"ID":"ba95f659-4fa7-4757-bb85-5502b5a338cf","Type":"ContainerDied","Data":"fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910"}
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.058553 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpn6p" event={"ID":"ba95f659-4fa7-4757-bb85-5502b5a338cf","Type":"ContainerStarted","Data":"d70ff7056b4e72824ca14030bcc15c87d735762c5987f31b8da143feded55a3e"}
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.059971 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.061923 4711 generic.go:334] "Generic (PLEG): container finished" podID="de36f048-c19b-4002-a1c5-e9058f997558" containerID="0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a" exitCode=0
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.061955 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2x2p" event={"ID":"de36f048-c19b-4002-a1c5-e9058f997558","Type":"ContainerDied","Data":"0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a"}
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.061977 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2x2p" event={"ID":"de36f048-c19b-4002-a1c5-e9058f997558","Type":"ContainerStarted","Data":"808806b72af207e4a0c64621e3993d3d700ccd1320b18afcc35869c588689eec"}
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.948219 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rbvld"]
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.951478 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:04 crc kubenswrapper[4711]: I1205 14:01:04.962910 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rbvld"]
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.063327 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6mst\" (UniqueName: \"kubernetes.io/projected/35b9495a-2d5d-42b6-a0ff-53061af03a88-kube-api-access-r6mst\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.063380 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-utilities\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.063434 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-catalog-content\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.073584 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpn6p" event={"ID":"ba95f659-4fa7-4757-bb85-5502b5a338cf","Type":"ContainerStarted","Data":"491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826"}
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.076519 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2x2p" event={"ID":"de36f048-c19b-4002-a1c5-e9058f997558","Type":"ContainerStarted","Data":"e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801"}
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.165061 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6mst\" (UniqueName: \"kubernetes.io/projected/35b9495a-2d5d-42b6-a0ff-53061af03a88-kube-api-access-r6mst\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.165099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-utilities\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.165125 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-catalog-content\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.165756 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-utilities\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.165776 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-catalog-content\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.183942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6mst\" (UniqueName: \"kubernetes.io/projected/35b9495a-2d5d-42b6-a0ff-53061af03a88-kube-api-access-r6mst\") pod \"certified-operators-rbvld\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.272151 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbvld"
Dec 05 14:01:05 crc kubenswrapper[4711]: I1205 14:01:05.884066 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rbvld"]
Dec 05 14:01:05 crc kubenswrapper[4711]: W1205 14:01:05.888498 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35b9495a_2d5d_42b6_a0ff_53061af03a88.slice/crio-78c0976579300d62a256ec9a77a45b1e1fe47c0b022b9be1623bbe2e78b763ea WatchSource:0}: Error finding container 78c0976579300d62a256ec9a77a45b1e1fe47c0b022b9be1623bbe2e78b763ea: Status 404 returned error can't find the container with id 78c0976579300d62a256ec9a77a45b1e1fe47c0b022b9be1623bbe2e78b763ea
Dec 05 14:01:06 crc kubenswrapper[4711]: I1205 14:01:06.090178 4711 generic.go:334] "Generic (PLEG): container finished" podID="ba95f659-4fa7-4757-bb85-5502b5a338cf" containerID="491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826" exitCode=0
Dec 05 14:01:06 crc kubenswrapper[4711]: I1205 14:01:06.090253 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpn6p" event={"ID":"ba95f659-4fa7-4757-bb85-5502b5a338cf","Type":"ContainerDied","Data":"491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826"}
Dec 05 14:01:06 crc kubenswrapper[4711]: I1205 14:01:06.097371 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbvld" event={"ID":"35b9495a-2d5d-42b6-a0ff-53061af03a88","Type":"ContainerStarted","Data":"78c0976579300d62a256ec9a77a45b1e1fe47c0b022b9be1623bbe2e78b763ea"}
Dec 05 14:01:07 crc kubenswrapper[4711]: I1205 14:01:07.108949 4711 generic.go:334] "Generic (PLEG): container finished" podID="35b9495a-2d5d-42b6-a0ff-53061af03a88" containerID="6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2" exitCode=0
Dec 05 14:01:07 crc kubenswrapper[4711]: I1205 14:01:07.109126 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbvld" event={"ID":"35b9495a-2d5d-42b6-a0ff-53061af03a88","Type":"ContainerDied","Data":"6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2"}
Dec 05 14:01:07 crc kubenswrapper[4711]: I1205 14:01:07.112538 4711 generic.go:334] "Generic (PLEG): container finished" podID="18636364-62fb-46ea-bd33-733ec99b6529" containerID="abfa0482f89f8c9e17248e22b0029fcd1a5febf4001d85ec189c9b98e74333ea" exitCode=0
Dec 05 14:01:07 crc kubenswrapper[4711]: I1205 14:01:07.112625 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415721-rxpfc" event={"ID":"18636364-62fb-46ea-bd33-733ec99b6529","Type":"ContainerDied","Data":"abfa0482f89f8c9e17248e22b0029fcd1a5febf4001d85ec189c9b98e74333ea"}
Dec 05 14:01:07 crc kubenswrapper[4711]: I1205 14:01:07.116757 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpn6p" event={"ID":"ba95f659-4fa7-4757-bb85-5502b5a338cf","Type":"ContainerStarted","Data":"fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f"}
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.160765 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kpn6p" podStartSLOduration=3.658323684 podStartE2EDuration="6.160742377s" podCreationTimestamp="2025-12-05 14:01:02 +0000 UTC" firstStartedPulling="2025-12-05 14:01:04.059762166 +0000 UTC m=+6709.644084496" lastFinishedPulling="2025-12-05 14:01:06.562180859 +0000 UTC m=+6712.146503189" observedRunningTime="2025-12-05 14:01:08.151715967 +0000 UTC m=+6713.736038297" watchObservedRunningTime="2025-12-05 14:01:08.160742377 +0000 UTC m=+6713.745064707"
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.548118 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415721-rxpfc"
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.560354 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-config-data\") pod \"18636364-62fb-46ea-bd33-733ec99b6529\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") "
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.560471 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42nj2\" (UniqueName: \"kubernetes.io/projected/18636364-62fb-46ea-bd33-733ec99b6529-kube-api-access-42nj2\") pod \"18636364-62fb-46ea-bd33-733ec99b6529\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") "
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.560690 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-combined-ca-bundle\") pod \"18636364-62fb-46ea-bd33-733ec99b6529\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") "
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.560792 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-fernet-keys\") pod \"18636364-62fb-46ea-bd33-733ec99b6529\" (UID: \"18636364-62fb-46ea-bd33-733ec99b6529\") "
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.936736 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18636364-62fb-46ea-bd33-733ec99b6529-kube-api-access-42nj2" (OuterVolumeSpecName: "kube-api-access-42nj2") pod "18636364-62fb-46ea-bd33-733ec99b6529" (UID: "18636364-62fb-46ea-bd33-733ec99b6529"). InnerVolumeSpecName "kube-api-access-42nj2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.936859 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "18636364-62fb-46ea-bd33-733ec99b6529" (UID: "18636364-62fb-46ea-bd33-733ec99b6529"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.960977 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18636364-62fb-46ea-bd33-733ec99b6529" (UID: "18636364-62fb-46ea-bd33-733ec99b6529"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.976642 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42nj2\" (UniqueName: \"kubernetes.io/projected/18636364-62fb-46ea-bd33-733ec99b6529-kube-api-access-42nj2\") on node \"crc\" DevicePath \"\""
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.976685 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.976697 4711 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 05 14:01:08 crc kubenswrapper[4711]: I1205 14:01:08.994931 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-config-data" (OuterVolumeSpecName: "config-data") pod "18636364-62fb-46ea-bd33-733ec99b6529" (UID: "18636364-62fb-46ea-bd33-733ec99b6529"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:01:09 crc kubenswrapper[4711]: I1205 14:01:09.078889 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18636364-62fb-46ea-bd33-733ec99b6529-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 14:01:09 crc kubenswrapper[4711]: I1205 14:01:09.142999 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415721-rxpfc" event={"ID":"18636364-62fb-46ea-bd33-733ec99b6529","Type":"ContainerDied","Data":"43d88fd13871bb2f7d507f2c932d41edad6764ac5d4c80209a06726aee3ec989"}
Dec 05 14:01:09 crc kubenswrapper[4711]: I1205 14:01:09.143045 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43d88fd13871bb2f7d507f2c932d41edad6764ac5d4c80209a06726aee3ec989"
Dec 05 14:01:09 crc kubenswrapper[4711]: I1205 14:01:09.143072 4711 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-cron-29415721-rxpfc" Dec 05 14:01:10 crc kubenswrapper[4711]: I1205 14:01:10.157436 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbvld" event={"ID":"35b9495a-2d5d-42b6-a0ff-53061af03a88","Type":"ContainerStarted","Data":"065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33"} Dec 05 14:01:10 crc kubenswrapper[4711]: I1205 14:01:10.162497 4711 generic.go:334] "Generic (PLEG): container finished" podID="de36f048-c19b-4002-a1c5-e9058f997558" containerID="e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801" exitCode=0 Dec 05 14:01:10 crc kubenswrapper[4711]: I1205 14:01:10.162540 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2x2p" event={"ID":"de36f048-c19b-4002-a1c5-e9058f997558","Type":"ContainerDied","Data":"e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801"} Dec 05 14:01:12 crc kubenswrapper[4711]: I1205 14:01:12.183751 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2x2p" event={"ID":"de36f048-c19b-4002-a1c5-e9058f997558","Type":"ContainerStarted","Data":"4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5"} Dec 05 14:01:12 crc kubenswrapper[4711]: I1205 14:01:12.185587 4711 generic.go:334] "Generic (PLEG): container finished" podID="35b9495a-2d5d-42b6-a0ff-53061af03a88" containerID="065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33" exitCode=0 Dec 05 14:01:12 crc kubenswrapper[4711]: I1205 14:01:12.185625 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbvld" event={"ID":"35b9495a-2d5d-42b6-a0ff-53061af03a88","Type":"ContainerDied","Data":"065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33"} Dec 05 14:01:12 crc kubenswrapper[4711]: I1205 14:01:12.208283 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j2x2p" podStartSLOduration=2.896928834 podStartE2EDuration="10.20826034s" podCreationTimestamp="2025-12-05 14:01:02 +0000 UTC" firstStartedPulling="2025-12-05 14:01:04.063669271 +0000 UTC m=+6709.647991601" lastFinishedPulling="2025-12-05 14:01:11.375000777 +0000 UTC m=+6716.959323107" observedRunningTime="2025-12-05 14:01:12.203732569 +0000 UTC m=+6717.788054899" watchObservedRunningTime="2025-12-05 14:01:12.20826034 +0000 UTC m=+6717.792582690" Dec 05 14:01:12 crc kubenswrapper[4711]: I1205 14:01:12.866527 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kpn6p" Dec 05 14:01:12 crc kubenswrapper[4711]: I1205 14:01:12.866867 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kpn6p" Dec 05 14:01:12 crc kubenswrapper[4711]: I1205 14:01:12.912466 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kpn6p" Dec 05 14:01:13 crc kubenswrapper[4711]: I1205 14:01:13.123185 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j2x2p" Dec 05 14:01:13 crc kubenswrapper[4711]: I1205 14:01:13.123232 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j2x2p" Dec 05 14:01:13 crc kubenswrapper[4711]: I1205 14:01:13.209497 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-rbvld" event={"ID":"35b9495a-2d5d-42b6-a0ff-53061af03a88","Type":"ContainerStarted","Data":"8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5"} Dec 05 14:01:13 crc kubenswrapper[4711]: I1205 14:01:13.229358 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rbvld" podStartSLOduration=3.761144073 podStartE2EDuration="9.229340536s" podCreationTimestamp="2025-12-05 14:01:04 +0000 UTC" firstStartedPulling="2025-12-05 14:01:07.110876136 +0000 UTC m=+6712.695198466" lastFinishedPulling="2025-12-05 14:01:12.579072599 +0000 UTC m=+6718.163394929" observedRunningTime="2025-12-05 14:01:13.22866712 +0000 UTC m=+6718.812989440" watchObservedRunningTime="2025-12-05 14:01:13.229340536 +0000 UTC m=+6718.813662866" Dec 05 14:01:13 crc kubenswrapper[4711]: I1205 14:01:13.261895 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kpn6p" Dec 05 14:01:14 crc kubenswrapper[4711]: I1205 14:01:14.173615 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-j2x2p" podUID="de36f048-c19b-4002-a1c5-e9058f997558" containerName="registry-server" probeResult="failure" output=< Dec 05 14:01:14 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Dec 05 14:01:14 crc kubenswrapper[4711]: > Dec 05 14:01:15 crc kubenswrapper[4711]: I1205 14:01:15.272619 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rbvld" Dec 05 14:01:15 crc kubenswrapper[4711]: I1205 14:01:15.272997 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rbvld" Dec 05 14:01:15 crc kubenswrapper[4711]: I1205 14:01:15.325243 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rbvld" Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.128129 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpn6p"] Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.128559 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kpn6p" podUID="ba95f659-4fa7-4757-bb85-5502b5a338cf" containerName="registry-server" containerID="cri-o://fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f" gracePeriod=2 Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.612301 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kpn6p" Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.763779 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-utilities\") pod \"ba95f659-4fa7-4757-bb85-5502b5a338cf\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.763942 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-catalog-content\") pod \"ba95f659-4fa7-4757-bb85-5502b5a338cf\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.763971 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xxxf\" (UniqueName: \"kubernetes.io/projected/ba95f659-4fa7-4757-bb85-5502b5a338cf-kube-api-access-2xxxf\") pod \"ba95f659-4fa7-4757-bb85-5502b5a338cf\" (UID: \"ba95f659-4fa7-4757-bb85-5502b5a338cf\") " Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.765784 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-utilities" (OuterVolumeSpecName: "utilities") pod "ba95f659-4fa7-4757-bb85-5502b5a338cf" (UID: "ba95f659-4fa7-4757-bb85-5502b5a338cf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.781529 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba95f659-4fa7-4757-bb85-5502b5a338cf" (UID: "ba95f659-4fa7-4757-bb85-5502b5a338cf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.794597 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba95f659-4fa7-4757-bb85-5502b5a338cf-kube-api-access-2xxxf" (OuterVolumeSpecName: "kube-api-access-2xxxf") pod "ba95f659-4fa7-4757-bb85-5502b5a338cf" (UID: "ba95f659-4fa7-4757-bb85-5502b5a338cf"). InnerVolumeSpecName "kube-api-access-2xxxf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.867833 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.867880 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba95f659-4fa7-4757-bb85-5502b5a338cf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:17 crc kubenswrapper[4711]: I1205 14:01:17.867893 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xxxf\" (UniqueName: \"kubernetes.io/projected/ba95f659-4fa7-4757-bb85-5502b5a338cf-kube-api-access-2xxxf\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.254305 4711 generic.go:334] "Generic (PLEG): container finished" podID="ba95f659-4fa7-4757-bb85-5502b5a338cf" containerID="fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f" exitCode=0 Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.254347 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpn6p" event={"ID":"ba95f659-4fa7-4757-bb85-5502b5a338cf","Type":"ContainerDied","Data":"fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f"} Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.254367 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kpn6p" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.254401 4711 scope.go:117] "RemoveContainer" containerID="fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.254375 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpn6p" event={"ID":"ba95f659-4fa7-4757-bb85-5502b5a338cf","Type":"ContainerDied","Data":"d70ff7056b4e72824ca14030bcc15c87d735762c5987f31b8da143feded55a3e"} Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.286988 4711 scope.go:117] "RemoveContainer" containerID="491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.289644 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpn6p"] Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.299356 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpn6p"] Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.309080 4711 scope.go:117] "RemoveContainer" containerID="fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.354013 4711 scope.go:117] "RemoveContainer" containerID="fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f" Dec 05 14:01:18 crc kubenswrapper[4711]: E1205 14:01:18.354502 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f\": container with ID starting with fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f not found: ID does not exist" containerID="fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.354541 4711 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f"} err="failed to get container status \"fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f\": rpc error: code = NotFound desc = could not find container \"fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f\": container with ID starting with fefbfc37ef81b26411ad60393f272eee05f557f057475d1d794c71a706f3961f not found: ID does not exist" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.354569 4711 scope.go:117] "RemoveContainer" containerID="491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826" Dec 05 14:01:18 crc kubenswrapper[4711]: E1205 14:01:18.355002 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826\": container with ID starting with 491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826 not found: ID does not exist" containerID="491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.355060 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826"} err="failed to get container status \"491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826\": rpc error: code = NotFound desc = could not find container \"491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826\": container with ID starting with 491b6f086f9956fbbdba48e262cd16925f1f17eaf1118f10538c6c6bc80b8826 not found: ID does not exist" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.355103 4711 scope.go:117] "RemoveContainer" containerID="fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910" Dec 05 14:01:18 crc kubenswrapper[4711]: E1205 14:01:18.355463 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910\": container with ID starting with fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910 not found: ID does not exist" containerID="fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.355494 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910"} err="failed to get container status \"fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910\": rpc error: code = NotFound desc = could not find container \"fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910\": container with ID starting with fb7c35311e742d05a808124df80f7bcd1d5b72a3c568f89eb250736f101f9910 not found: ID does not exist" Dec 05 14:01:18 crc kubenswrapper[4711]: I1205 14:01:18.700789 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba95f659-4fa7-4757-bb85-5502b5a338cf" path="/var/lib/kubelet/pods/ba95f659-4fa7-4757-bb85-5502b5a338cf/volumes" Dec 05 14:01:23 crc kubenswrapper[4711]: I1205 14:01:23.178277 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j2x2p" Dec 05 14:01:23 crc kubenswrapper[4711]: I1205 14:01:23.225834 4711 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j2x2p" Dec 05 14:01:23 crc kubenswrapper[4711]: I1205 14:01:23.417475 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j2x2p"] Dec 05 14:01:24 crc kubenswrapper[4711]: I1205 14:01:24.331483 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j2x2p" podUID="de36f048-c19b-4002-a1c5-e9058f997558" containerName="registry-server" containerID="cri-o://4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5" gracePeriod=2 Dec 05 14:01:24 crc kubenswrapper[4711]: I1205 14:01:24.990050 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j2x2p" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.122739 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-catalog-content\") pod \"de36f048-c19b-4002-a1c5-e9058f997558\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.122936 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-utilities\") pod \"de36f048-c19b-4002-a1c5-e9058f997558\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.123019 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7h4l\" (UniqueName: \"kubernetes.io/projected/de36f048-c19b-4002-a1c5-e9058f997558-kube-api-access-j7h4l\") pod \"de36f048-c19b-4002-a1c5-e9058f997558\" (UID: \"de36f048-c19b-4002-a1c5-e9058f997558\") " Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.123766 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-utilities" (OuterVolumeSpecName: "utilities") pod "de36f048-c19b-4002-a1c5-e9058f997558" (UID: "de36f048-c19b-4002-a1c5-e9058f997558"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.134612 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de36f048-c19b-4002-a1c5-e9058f997558-kube-api-access-j7h4l" (OuterVolumeSpecName: "kube-api-access-j7h4l") pod "de36f048-c19b-4002-a1c5-e9058f997558" (UID: "de36f048-c19b-4002-a1c5-e9058f997558"). InnerVolumeSpecName "kube-api-access-j7h4l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.225148 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.225205 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7h4l\" (UniqueName: \"kubernetes.io/projected/de36f048-c19b-4002-a1c5-e9058f997558-kube-api-access-j7h4l\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.253944 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de36f048-c19b-4002-a1c5-e9058f997558" (UID: "de36f048-c19b-4002-a1c5-e9058f997558"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.321793 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rbvld" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.326432 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36f048-c19b-4002-a1c5-e9058f997558-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.342350 4711 generic.go:334] "Generic (PLEG): container finished" podID="de36f048-c19b-4002-a1c5-e9058f997558" containerID="4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5" exitCode=0 Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.342408 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2x2p" event={"ID":"de36f048-c19b-4002-a1c5-e9058f997558","Type":"ContainerDied","Data":"4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5"} Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.342452 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j2x2p" event={"ID":"de36f048-c19b-4002-a1c5-e9058f997558","Type":"ContainerDied","Data":"808806b72af207e4a0c64621e3993d3d700ccd1320b18afcc35869c588689eec"} Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.342456 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j2x2p" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.342472 4711 scope.go:117] "RemoveContainer" containerID="4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.361354 4711 scope.go:117] "RemoveContainer" containerID="e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.385677 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j2x2p"] Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.387844 4711 scope.go:117] "RemoveContainer" containerID="0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.397693 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j2x2p"] Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.432275 4711 scope.go:117] "RemoveContainer" containerID="4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5" Dec 05 14:01:25 crc kubenswrapper[4711]: E1205 14:01:25.432613 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5\": container with ID starting with 4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5 not found: ID does not exist" containerID="4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.432641 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5"} err="failed to get container status \"4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5\": rpc error: code = NotFound desc = could not find container \"4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5\": container with ID starting with 4bc159ba0cb5511aaf1c64860e996c04ead2bea50592c6e1b251cbbda68c86d5 not found: ID does not exist" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.432660 4711 scope.go:117] "RemoveContainer" containerID="e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801" Dec 05 14:01:25 crc kubenswrapper[4711]: E1205 14:01:25.433001 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801\": container with ID starting with e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801 not found: ID does not exist" containerID="e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.433020 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801"} err="failed to get container status \"e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801\": rpc error: code = NotFound desc = could not find container \"e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801\": container with ID starting with e29f78cca5e6f5dd8b9b406ff9b04b3d5440f90c1cd6f84eb67e989b7c512801 not found: ID does not exist" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.433032 4711 scope.go:117] "RemoveContainer" 
containerID="0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a" Dec 05 14:01:25 crc kubenswrapper[4711]: E1205 14:01:25.433220 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a\": container with ID starting with 0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a not found: ID does not exist" containerID="0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a" Dec 05 14:01:25 crc kubenswrapper[4711]: I1205 14:01:25.433237 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a"} err="failed to get container status \"0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a\": rpc error: code = NotFound desc = could not find container \"0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a\": container with ID starting with 0c588936c5e387d10768ed65767b4df4e7979360575927c3adb055b2312fee7a not found: ID does not exist" Dec 05 14:01:26 crc kubenswrapper[4711]: I1205 14:01:26.694115 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de36f048-c19b-4002-a1c5-e9058f997558" path="/var/lib/kubelet/pods/de36f048-c19b-4002-a1c5-e9058f997558/volumes" Dec 05 14:01:27 crc kubenswrapper[4711]: I1205 14:01:27.427546 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rbvld"] Dec 05 14:01:27 crc kubenswrapper[4711]: I1205 14:01:27.427890 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rbvld" podUID="35b9495a-2d5d-42b6-a0ff-53061af03a88" containerName="registry-server" containerID="cri-o://8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5" gracePeriod=2 Dec 05 14:01:27 crc kubenswrapper[4711]: I1205 14:01:27.948809 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbvld" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.085493 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6mst\" (UniqueName: \"kubernetes.io/projected/35b9495a-2d5d-42b6-a0ff-53061af03a88-kube-api-access-r6mst\") pod \"35b9495a-2d5d-42b6-a0ff-53061af03a88\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.085529 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-utilities\") pod \"35b9495a-2d5d-42b6-a0ff-53061af03a88\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.085603 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-catalog-content\") pod \"35b9495a-2d5d-42b6-a0ff-53061af03a88\" (UID: \"35b9495a-2d5d-42b6-a0ff-53061af03a88\") " Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.086434 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-utilities" (OuterVolumeSpecName: "utilities") pod "35b9495a-2d5d-42b6-a0ff-53061af03a88" (UID: "35b9495a-2d5d-42b6-a0ff-53061af03a88"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.091790 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b9495a-2d5d-42b6-a0ff-53061af03a88-kube-api-access-r6mst" (OuterVolumeSpecName: "kube-api-access-r6mst") pod "35b9495a-2d5d-42b6-a0ff-53061af03a88" (UID: "35b9495a-2d5d-42b6-a0ff-53061af03a88"). InnerVolumeSpecName "kube-api-access-r6mst". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.139217 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35b9495a-2d5d-42b6-a0ff-53061af03a88" (UID: "35b9495a-2d5d-42b6-a0ff-53061af03a88"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.188181 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6mst\" (UniqueName: \"kubernetes.io/projected/35b9495a-2d5d-42b6-a0ff-53061af03a88-kube-api-access-r6mst\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.188218 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.188233 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b9495a-2d5d-42b6-a0ff-53061af03a88-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.374890 4711 generic.go:334] "Generic (PLEG): container finished" podID="35b9495a-2d5d-42b6-a0ff-53061af03a88" containerID="8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5" exitCode=0 Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.375028 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbvld" event={"ID":"35b9495a-2d5d-42b6-a0ff-53061af03a88","Type":"ContainerDied","Data":"8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5"} Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.375091 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rbvld" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.375211 4711 scope.go:117] "RemoveContainer" containerID="8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.375198 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbvld" event={"ID":"35b9495a-2d5d-42b6-a0ff-53061af03a88","Type":"ContainerDied","Data":"78c0976579300d62a256ec9a77a45b1e1fe47c0b022b9be1623bbe2e78b763ea"} Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.406994 4711 scope.go:117] "RemoveContainer" containerID="065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.418055 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rbvld"] Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.426589 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rbvld"] Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.437229 4711 scope.go:117] "RemoveContainer" containerID="6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.482046 4711 scope.go:117] "RemoveContainer" containerID="8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5" Dec 05 14:01:28 crc kubenswrapper[4711]: E1205 14:01:28.482481 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5\": container with ID starting with 8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5 not found: ID does not exist" containerID="8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.482514 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5"} err="failed to get container status \"8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5\": rpc error: code = NotFound desc = could not find container \"8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5\": container with ID starting with 8a130eefeccc5bbf40e428809aa6b0eedfb47925866e5e51c08ef33b669303e5 not found: ID does not exist" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.482537 4711 scope.go:117] "RemoveContainer" containerID="065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33" Dec 05 14:01:28 crc kubenswrapper[4711]: E1205 14:01:28.482851 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33\": container with ID starting with 065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33 not found: ID does not exist" containerID="065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.482906 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33"} err="failed to get container status \"065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33\": rpc error: code = NotFound desc = could not find 
container \"065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33\": container with ID starting with 065481caf0bdd143d890db43c7cee78cec5fe20bf2c340e3fcff59dfcd285c33 not found: ID does not exist" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.482944 4711 scope.go:117] "RemoveContainer" containerID="6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2" Dec 05 14:01:28 crc kubenswrapper[4711]: E1205 14:01:28.483252 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2\": container with ID starting with 6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2 not found: ID does not exist" containerID="6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.483278 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2"} err="failed to get container status \"6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2\": rpc error: code = NotFound desc = could not find container \"6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2\": container with ID starting with 6c0f0ebc6d4e757695e56dfd5270d629a80a06df68aca7510273a0f46b35aec2 not found: ID does not exist" Dec 05 14:01:28 crc kubenswrapper[4711]: I1205 14:01:28.695581 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35b9495a-2d5d-42b6-a0ff-53061af03a88" path="/var/lib/kubelet/pods/35b9495a-2d5d-42b6-a0ff-53061af03a88/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114563012024443 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114563013017361 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114545151016507 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114545151015457 5ustar corecore